| diff | is_single_chunk | is_single_function | buggy_function | fixed_function |
| --- | --- | --- | --- | --- |
| stringlengths 262–553k | bool (2 classes) | bool (1 class) | stringlengths 20–391k | stringlengths 0–392k |
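For orientation, the row shape implied by this header can be modeled as a small value type. The sketch below is purely illustrative: the record and its field names are hypothetical, chosen only to mirror the column names above, and are not part of any tooling shipped with the dataset.

```java
/** Illustrative sketch of one dataset row; hypothetical type mirroring the columns above. */
public record BugFixPair(
        String diff,               // unified git diff of the fix (262 to ~553k chars)
        boolean isSingleChunk,     // whether the diff is confined to a single hunk
        boolean isSingleFunction,  // whether the change is confined to one function
        String buggyFunction,      // function body before the fix
        String fixedFunction) {    // function body after the fix
}
```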
diff --git a/org.springframework.integration/src/main/java/org/springframework/integration/config/xml/HeaderEnricherParserSupport.java b/org.springframework.integration/src/main/java/org/springframework/integration/config/xml/HeaderEnricherParserSupport.java
index 5e6205f9e3..683a7ab239 100644
--- a/org.springframework.integration/src/main/java/org/springframework/integration/config/xml/HeaderEnricherParserSupport.java
+++ b/org.springframework.integration/src/main/java/org/springframework/integration/config/xml/HeaderEnricherParserSupport.java
@@ -1,136 +1,141 @@
/*
* Copyright 2002-2009 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.integration.config.xml;
import java.util.HashMap;
import java.util.Map;
import org.w3c.dom.Element;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import org.springframework.beans.factory.config.RuntimeBeanReference;
import org.springframework.beans.factory.config.TypedStringValue;
import org.springframework.beans.factory.support.BeanDefinitionBuilder;
import org.springframework.beans.factory.support.ManagedMap;
import org.springframework.beans.factory.xml.ParserContext;
import org.springframework.integration.transformer.HeaderEnricher.ExpressionHolder;
import org.springframework.util.ClassUtils;
import org.springframework.util.StringUtils;
/**
* Base support class for 'header-enricher' parsers.
*
* @author Mark Fisher
* @since 2.0
*/
public abstract class HeaderEnricherParserSupport extends AbstractTransformerParser {
private final Map<String, String> elementToNameMap = new HashMap<String, String>();
private final Map<String, Class<?>> elementToTypeMap = new HashMap<String, Class<?>>();
@Override
protected final String getTransformerClassName() {
return IntegrationNamespaceUtils.BASE_PACKAGE + ".transformer.HeaderEnricher";
}
protected boolean shouldOverwrite(Element element) {
return "true".equals(element.getAttribute("overwrite").toLowerCase());
}
protected final void addElementToHeaderMapping(String elementName, String headerName) {
this.addElementToHeaderMapping(elementName, headerName, null);
}
protected final void addElementToHeaderMapping(String elementName, String headerName, Class<?> headerType) {
this.elementToNameMap.put(elementName, headerName);
if (headerType != null) {
this.elementToTypeMap.put(elementName, headerType);
}
}
@Override
@SuppressWarnings("unchecked")
protected void parseTransformer(Element element, ParserContext parserContext, BeanDefinitionBuilder builder) {
ManagedMap headers = new ManagedMap();
this.processHeaders(element, headers, parserContext);
builder.addConstructorArgValue(headers);
builder.addPropertyValue("overwrite", this.shouldOverwrite(element)); // TODO: should be a per-header config setting
}
protected void processHeaders(Element element, ManagedMap<String, Object> headers, ParserContext parserContext) {
NodeList childNodes = element.getChildNodes();
for (int i = 0; i < childNodes.getLength(); i++) {
Node node = childNodes.item(i);
if (node.getNodeType() == Node.ELEMENT_NODE) {
String headerName = null;
Element headerElement = (Element) node;
String elementName = node.getLocalName();
Class<?> headerType = null;
if ("header".equals(elementName)) {
headerName = headerElement.getAttribute("name");
}
else {
headerName = elementToNameMap.get(elementName);
headerType = elementToTypeMap.get(elementName);
+ if (headerType != null && StringUtils.hasText(headerElement.getAttribute("type"))) {
+ parserContext.getReaderContext().error("The " + elementName
+ + " header does not accept a 'type' attribute. The required type is ["
+ + headerType.getName() + "]", element);
+ }
}
if (headerType == null) {
String headerTypeName = headerElement.getAttribute("type");
if (StringUtils.hasText(headerTypeName)) {
ClassLoader classLoader = parserContext.getReaderContext().getBeanClassLoader();
if (classLoader == null) {
classLoader = getClass().getClassLoader();
}
try {
headerType = ClassUtils.forName(headerTypeName, classLoader);
}
catch (Exception e) {
parserContext.getReaderContext().error("unable to resolve type [" +
headerTypeName + "] for header '" + headerName + "'", element, e);
}
}
}
if (headerName != null) {
String value = headerElement.getAttribute("value");
String ref = headerElement.getAttribute("ref");
String expression = headerElement.getAttribute("expression");
boolean isValue = StringUtils.hasText(value);
boolean isRef = StringUtils.hasText(ref);
boolean isExpression = StringUtils.hasText(expression);
if (!(isValue ^ (isRef ^ isExpression))) {
parserContext.getReaderContext().error(
"Exactly one of the 'ref', 'value', or 'expression' attributes is required.", element);
}
if (isValue) {
Object headerValue = (headerType != null) ?
new TypedStringValue(value, headerType) : value;
headers.put(headerName, headerValue);
}
else if (isExpression) {
headers.put(headerName, new ExpressionHolder(expression, headerType));
}
else {
headers.put(headerName, new RuntimeBeanReference(ref));
}
}
}
}
}
}
| true | true |
protected void processHeaders(Element element, ManagedMap<String, Object> headers, ParserContext parserContext) {
NodeList childNodes = element.getChildNodes();
for (int i = 0; i < childNodes.getLength(); i++) {
Node node = childNodes.item(i);
if (node.getNodeType() == Node.ELEMENT_NODE) {
String headerName = null;
Element headerElement = (Element) node;
String elementName = node.getLocalName();
Class<?> headerType = null;
if ("header".equals(elementName)) {
headerName = headerElement.getAttribute("name");
}
else {
headerName = elementToNameMap.get(elementName);
headerType = elementToTypeMap.get(elementName);
}
if (headerType == null) {
String headerTypeName = headerElement.getAttribute("type");
if (StringUtils.hasText(headerTypeName)) {
ClassLoader classLoader = parserContext.getReaderContext().getBeanClassLoader();
if (classLoader == null) {
classLoader = getClass().getClassLoader();
}
try {
headerType = ClassUtils.forName(headerTypeName, classLoader);
}
catch (Exception e) {
parserContext.getReaderContext().error("unable to resolve type [" +
headerTypeName + "] for header '" + headerName + "'", element, e);
}
}
}
if (headerName != null) {
String value = headerElement.getAttribute("value");
String ref = headerElement.getAttribute("ref");
String expression = headerElement.getAttribute("expression");
boolean isValue = StringUtils.hasText(value);
boolean isRef = StringUtils.hasText(ref);
boolean isExpression = StringUtils.hasText(expression);
if (!(isValue ^ (isRef ^ isExpression))) {
parserContext.getReaderContext().error(
"Exactly one of the 'ref', 'value', or 'expression' attributes is required.", element);
}
if (isValue) {
Object headerValue = (headerType != null) ?
new TypedStringValue(value, headerType) : value;
headers.put(headerName, headerValue);
}
else if (isExpression) {
headers.put(headerName, new ExpressionHolder(expression, headerType));
}
else {
headers.put(headerName, new RuntimeBeanReference(ref));
}
}
}
}
}
|
protected void processHeaders(Element element, ManagedMap<String, Object> headers, ParserContext parserContext) {
NodeList childNodes = element.getChildNodes();
for (int i = 0; i < childNodes.getLength(); i++) {
Node node = childNodes.item(i);
if (node.getNodeType() == Node.ELEMENT_NODE) {
String headerName = null;
Element headerElement = (Element) node;
String elementName = node.getLocalName();
Class<?> headerType = null;
if ("header".equals(elementName)) {
headerName = headerElement.getAttribute("name");
}
else {
headerName = elementToNameMap.get(elementName);
headerType = elementToTypeMap.get(elementName);
if (headerType != null && StringUtils.hasText(headerElement.getAttribute("type"))) {
parserContext.getReaderContext().error("The " + elementName
+ " header does not accept a 'type' attribute. The required type is ["
+ headerType.getName() + "]", element);
}
}
if (headerType == null) {
String headerTypeName = headerElement.getAttribute("type");
if (StringUtils.hasText(headerTypeName)) {
ClassLoader classLoader = parserContext.getReaderContext().getBeanClassLoader();
if (classLoader == null) {
classLoader = getClass().getClassLoader();
}
try {
headerType = ClassUtils.forName(headerTypeName, classLoader);
}
catch (Exception e) {
parserContext.getReaderContext().error("unable to resolve type [" +
headerTypeName + "] for header '" + headerName + "'", element, e);
}
}
}
if (headerName != null) {
String value = headerElement.getAttribute("value");
String ref = headerElement.getAttribute("ref");
String expression = headerElement.getAttribute("expression");
boolean isValue = StringUtils.hasText(value);
boolean isRef = StringUtils.hasText(ref);
boolean isExpression = StringUtils.hasText(expression);
if (!(isValue ^ (isRef ^ isExpression))) {
parserContext.getReaderContext().error(
"Exactly one of the 'ref', 'value', or 'expression' attributes is required.", element);
}
if (isValue) {
Object headerValue = (headerType != null) ?
new TypedStringValue(value, headerType) : value;
headers.put(headerName, headerValue);
}
else if (isExpression) {
headers.put(headerName, new ExpressionHolder(expression, headerType));
}
else {
headers.put(headerName, new RuntimeBeanReference(ref));
}
}
}
}
}
|
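The fix in this first pair adds a parser error when a pre-typed header element (one registered through addElementToHeaderMapping with a fixed type) also carries a 'type' attribute, since the required type is already known. Separately, the exclusivity check that appears unchanged in both versions relies on boolean XOR. The standalone sketch below (not taken from the dataset; names are hypothetical) shows that XOR parity accepts exactly one attribute but, as a quirk, also accepts all three, because it tests for an odd count rather than for exactly one.

```java
/** Standalone sketch of the parser's XOR exclusivity test; hypothetical names. */
public class XorExclusivityDemo {
    // Mirrors !(isValue ^ (isRef ^ isExpression)) from the parser:
    // XOR is associative, so the check passes when an odd number of flags are set.
    static boolean passes(boolean isValue, boolean isRef, boolean isExpression) {
        return isValue ^ isRef ^ isExpression;
    }

    public static void main(String[] args) {
        System.out.println(passes(true, false, false));  // true  - one attribute: accepted
        System.out.println(passes(true, true, false));   // false - two attributes: rejected
        System.out.println(passes(false, false, false)); // false - none: rejected
        System.out.println(passes(true, true, true));    // true  - all three also slip through
    }
}
```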
diff --git a/src/ca/cumulonimbus/pressurenetsdk/CbService.java b/src/ca/cumulonimbus/pressurenetsdk/CbService.java
index c65cc6e..0f6b8f4 100644
--- a/src/ca/cumulonimbus/pressurenetsdk/CbService.java
+++ b/src/ca/cumulonimbus/pressurenetsdk/CbService.java
@@ -1,795 +1,795 @@
package ca.cumulonimbus.pressurenetsdk;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.security.MessageDigest;
import java.util.ArrayList;
import java.util.Date;
import android.app.Service;
import android.content.Context;
import android.content.Intent;
import android.database.Cursor;
import android.location.Location;
import android.net.ConnectivityManager;
import android.os.AsyncTask;
import android.os.Handler;
import android.os.IBinder;
import android.os.Message;
import android.os.Messenger;
import android.os.RemoteException;
import android.os.SystemClock;
import android.provider.Settings.Secure;
import android.widget.TextView;
/**
* Represent developer-facing pressureNET API Background task; manage and run
* everything Handle Intents
*
* @author jacob
*
*/
public class CbService extends Service {
private CbDataCollector dataCollector;
private CbLocationManager locationManager;
private CbSettingsHandler settingsHandler;
private CbDb db;
public CbService service = this;
private String mAppDir;
IBinder mBinder;
ReadingSender sender;
String serverURL = "https://pressurenet.cumulonimbus.ca/";
// Service Interaction API Messages
public static final int MSG_OKAY = 0;
public static final int MSG_STOP = 1;
public static final int MSG_GET_BEST_LOCATION = 2;
public static final int MSG_BEST_LOCATION = 3;
public static final int MSG_GET_BEST_PRESSURE = 4;
public static final int MSG_BEST_PRESSURE = 5;
public static final int MSG_START_AUTOSUBMIT = 6;
public static final int MSG_STOP_AUTOSUBMIT = 7;
public static final int MSG_SET_SETTINGS = 8;
public static final int MSG_GET_SETTINGS = 9;
public static final int MSG_SETTINGS = 10;
public static final int MSG_START_DATA_STREAM = 11;
public static final int MSG_DATA_STREAM = 12;
public static final int MSG_STOP_DATA_STREAM = 13;
// pressureNET Live API
public static final int MSG_GET_LOCAL_RECENTS = 14;
public static final int MSG_LOCAL_RECENTS = 15;
public static final int MSG_GET_API_RECENTS = 16;
public static final int MSG_API_RECENTS = 17;
public static final int MSG_MAKE_API_CALL = 18;
public static final int MSG_API_RESULT_COUNT = 19;
// pressureNET API Cache
public static final int MSG_CLEAR_LOCAL_CACHE = 20;
public static final int MSG_REMOVE_FROM_PRESSURENET = 21;
public static final int MSG_CLEAR_API_CACHE = 22;
// Current Conditions
public static final int MSG_ADD_CURRENT_CONDITION = 23;
public static final int MSG_GET_CURRENT_CONDITIONS = 24;
public static final int MSG_CURRENT_CONDITIONS = 25;
// Sending Data
public static final int MSG_SEND_OBSERVATION = 26;
public static final int MSG_SEND_CURRENT_CONDITION = 27;
// Current Conditions API
public static final int MSG_MAKE_CURRENT_CONDITIONS_API_CALL = 28;
long lastAPICall = System.currentTimeMillis();
private final Handler mHandler = new Handler();
Messenger mMessenger = new Messenger(new IncomingHandler());
/**
* Find all the data for an observation.
*
* Location, Measurement values, etc.
*
* @return
*/
public CbObservation collectNewObservation() {
try {
CbObservation pressureObservation = new CbObservation();
log("cb collecting new observation");
// Location values
locationManager = new CbLocationManager(getApplicationContext());
locationManager.startGettingLocations();
// Measurement values
pressureObservation = dataCollector.getPressureObservation();
pressureObservation.setLocation(locationManager
.getCurrentBestLocation());
// stop listening for locations
locationManager.stopGettingLocations();
return pressureObservation;
} catch (Exception e) {
e.printStackTrace();
return new CbObservation();
}
}
/**
* Collect and send data in a different thread. This runs itself every
* "settingsHandler.getDataCollectionFrequency()" milliseconds
*/
private class ReadingSender implements Runnable {
public void run() {
log("collecting and submitting " + settingsHandler.getServerURL());
long base = SystemClock.uptimeMillis();
dataCollector.startCollectingData(null);
CbObservation singleObservation = new CbObservation();
if (settingsHandler.isCollectingData()) {
// Collect
singleObservation = collectNewObservation();
if (singleObservation.getObservationValue() != 0.0) {
// Store in database
db.open();
long count = db.addObservation(singleObservation);
db.close();
try {
if (settingsHandler.isSharingData()) {
// Send if we're online
if (isNetworkAvailable()) {
log("online and sending");
singleObservation.setClientKey(getApplicationContext().getPackageName());
sendCbObservation(singleObservation);
} else {
log("didn't send");
// TODO: mark as not sent, send later
}
}
} catch (Exception e) {
e.printStackTrace();
}
}
} else {
log("tried collecting, reading zero");
}
mHandler.postAtTime(this,
base + (settingsHandler.getDataCollectionFrequency()));
}
};
public boolean isNetworkAvailable() {
log("is net available?");
ConnectivityManager cm = (ConnectivityManager) this
.getSystemService(Context.CONNECTIVITY_SERVICE);
// test for connection
if (cm.getActiveNetworkInfo() != null
&& cm.getActiveNetworkInfo().isAvailable()
&& cm.getActiveNetworkInfo().isConnected()) {
log("yes");
return true;
} else {
log("no");
return false;
}
}
/**
* Stop all listeners, active sensors, etc, and shut down.
*
*/
public void stopAutoSubmit() {
if (locationManager != null) {
locationManager.stopGettingLocations();
}
if (dataCollector != null) {
dataCollector.stopCollectingData();
}
mHandler.removeCallbacks(sender);
}
/**
* Send the observation to the server
*
* @param observation
* @return
*/
public boolean sendCbObservation(CbObservation observation) {
try {
CbDataSender sender = new CbDataSender(getApplicationContext());
sender.setSettings(settingsHandler, locationManager, dataCollector );
sender.execute(observation.getObservationAsParams());
return true;
} catch (Exception e) {
return false;
}
}
/**
* Send a new account to the server
*
* @param account
* @return
*/
public boolean sendCbAccount(CbAccount account) {
try {
CbDataSender sender = new CbDataSender(getApplicationContext());
sender.setSettings(settingsHandler, locationManager, dataCollector);
sender.execute(account.getAccountAsParams());
return true;
} catch (Exception e) {
return false;
}
}
/**
* Send the current condition to the server
*
* @param observation
* @return
*/
public boolean sendCbCurrentCondition(CbCurrentCondition condition) {
log("sending cbcurrent condition");
try {
CbDataSender sender = new CbDataSender(getApplicationContext());
sender.setSettings(settingsHandler, locationManager, dataCollector );
sender.execute(condition.getCurrentConditionAsParams());
return true;
} catch (Exception e) {
return false;
}
}
/**
* Start the periodic data collection.
*/
public void startAutoSubmit() {
log("CbService: Starting to auto-collect and submit data.");
sender = new ReadingSender();
mHandler.post(sender);
}
@Override
public void onDestroy() {
log("on destroy");
stopAutoSubmit();
super.onDestroy();
}
@Override
public void onCreate() {
setUpFiles();
log("cb on create");
db = new CbDb(getApplicationContext());
super.onCreate();
}
/**
* Start running background data collection methods.
*
*/
@Override
public int onStartCommand(Intent intent, int flags, int startId) {
log("cb onstartcommand");
// Check the intent for Settings initialization
dataCollector = new CbDataCollector(getID(), getApplicationContext());
if (intent != null) {
startWithIntent(intent);
return START_STICKY;
} else {
log("INTENT NULL; checking db");
startWithDatabase();
}
super.onStartCommand(intent, flags, startId);
return START_STICKY;
}
public void startWithIntent(Intent intent) {
try {
settingsHandler = new CbSettingsHandler(getApplicationContext());
settingsHandler.setServerURL(serverURL);
settingsHandler.setAppID(getID());
// Seems like new settings. Try adding to the db.
settingsHandler.saveSettings();
// are we creating a new user?
if (intent.hasExtra("add_account")) {
log("adding new user");
CbAccount account = new CbAccount();
account.setEmail(intent.getStringExtra("email"));
account.setTimeRegistered(intent.getLongExtra("time", 0));
account.setUserID(intent.getStringExtra("userID"));
sendCbAccount(account);
}
// Start a new thread and return
startAutoSubmit();
} catch (Exception e) {
for (StackTraceElement ste : e.getStackTrace()) {
log(ste.getMethodName() + ste.getLineNumber());
}
}
}
public void startWithDatabase() {
try {
db.open();
// Check the database for Settings initialization
settingsHandler = new CbSettingsHandler(getApplicationContext());
// db.clearDb();
Cursor allSettings = db.fetchAllSettings();
log("cb intent null; checking db, size " + allSettings.getCount());
while (allSettings.moveToNext()) {
settingsHandler.setAppID(allSettings.getString(1));
settingsHandler.setDataCollectionFrequency(allSettings
.getLong(2));
settingsHandler.setServerURL(allSettings.getString(3));
startAutoSubmit();
// but just once
break;
}
db.close();
} catch (Exception e) {
for (StackTraceElement ste : e.getStackTrace()) {
log(ste.getMethodName() + ste.getLineNumber());
}
}
}
/**
* Handler of incoming messages from clients.
*/
class IncomingHandler extends Handler {
@Override
public void handleMessage(Message msg) {
switch (msg.what) {
case MSG_STOP:
log("message. bound service says stop");
stopAutoSubmit();
break;
case MSG_GET_BEST_LOCATION:
log("message. bound service requesting location");
if (locationManager != null) {
Location best = locationManager.getCurrentBestLocation();
try {
log("service sending best location");
msg.replyTo.send(Message.obtain(null,
MSG_BEST_LOCATION, best));
} catch (RemoteException re) {
re.printStackTrace();
}
} else {
log("error: location null, not returning");
}
break;
case MSG_GET_BEST_PRESSURE:
log("message. bound service requesting pressure");
if (dataCollector != null) {
CbObservation pressure = dataCollector
.getPressureObservation();
try {
log("service sending best pressure");
msg.replyTo.send(Message.obtain(null,
MSG_BEST_PRESSURE, pressure));
} catch (RemoteException re) {
re.printStackTrace();
}
} else {
log("error: data collector null, not returning");
}
break;
case MSG_START_AUTOSUBMIT:
log("start autosubmit");
startWithDatabase();
break;
case MSG_STOP_AUTOSUBMIT:
log("stop autosubmit");
stopAutoSubmit();
break;
case MSG_GET_SETTINGS:
log("get settings");
try {
msg.replyTo.send(Message.obtain(null, MSG_SETTINGS,
settingsHandler));
} catch (RemoteException re) {
re.printStackTrace();
}
break;
case MSG_START_DATA_STREAM:
startDataStream(msg.replyTo);
break;
case MSG_STOP_DATA_STREAM:
stopDataStream();
break;
case MSG_SET_SETTINGS:
log("set settings");
CbSettingsHandler newSettings = (CbSettingsHandler) msg.obj;
newSettings.saveSettings();
break;
case MSG_GET_LOCAL_RECENTS:
log("get recents");
CbApiCall apiCall = (CbApiCall) msg.obj;
System.out.println(apiCall);
if(apiCall == null) {
System.out.println("apicall null, bailing");
break;
}
// run API call
db.open();
Cursor cursor = db.runLocalAPICall(apiCall.getMinLat(),
apiCall.getMaxLat(), apiCall.getMinLon(),
apiCall.getMaxLon(), apiCall.getStartTime(),
apiCall.getEndTime(), 2000);
System.out.println("local api cursor count " + cursor.getCount());
ArrayList<CbObservation> results = new ArrayList<CbObservation>();
while (cursor.moveToNext()) {
// TODO: This is duplicated in CbDataCollector. Fix that
CbObservation obs = new CbObservation();
Location location = new Location("network");
location.setLatitude(cursor.getDouble(1));
location.setLongitude(cursor.getDouble(2));
location.setAltitude(cursor.getDouble(3));
location.setAccuracy(cursor.getInt(4));
location.setProvider(cursor.getString(5));
obs.setLocation(location);
obs.setObservationType(cursor.getString(6));
obs.setObservationUnit(cursor.getString(7));
obs.setObservationValue(cursor.getDouble(8));
obs.setSharing(cursor.getString(9));
- obs.setTime(cursor.getInt(10));
+ obs.setTime(cursor.getLong(10));
obs.setTimeZoneOffset(cursor.getInt(11));
obs.setUser_id(cursor.getString(12));
obs.setTrend(cursor.getString(18));
// TODO: Add sensor information
results.add(obs);
}
db.close();
log("cbservice: " + results.size() + " local api results");
try {
msg.replyTo.send(Message.obtain(null, MSG_LOCAL_RECENTS,
results));
} catch (RemoteException re) {
re.printStackTrace();
}
break;
case MSG_GET_API_RECENTS:
log("get api recents");
CbApiCall apiCacheCall = (CbApiCall) msg.obj;
log(apiCacheCall.toString());
// run API call
db.open();
Cursor cacheCursor = db.runAPICacheCall(
apiCacheCall.getMinLat(), apiCacheCall.getMaxLat(),
apiCacheCall.getMinLon(), apiCacheCall.getMaxLon(),
apiCacheCall.getStartTime(), apiCacheCall.getEndTime(),
2000);
ArrayList<CbObservation> cacheResults = new ArrayList<CbObservation>();
System.out.println("cache cursor count " + cacheCursor.getCount());
while (cacheCursor.moveToNext()) {
// TODO: This is duplicated in CbDataCollector. Fix that
CbObservation obs = new CbObservation();
Location location = new Location("network");
location.setLatitude(cacheCursor.getDouble(1));
location.setLongitude(cacheCursor.getDouble(2));
location.setAltitude(cacheCursor.getDouble(3));
location.setAccuracy(cacheCursor.getInt(4));
location.setProvider(cacheCursor.getString(5));
obs.setLocation(location);
obs.setObservationType(cacheCursor.getString(6));
obs.setObservationUnit(cacheCursor.getString(7));
obs.setObservationValue(cacheCursor.getDouble(8));
obs.setSharing(cacheCursor.getString(9));
obs.setTime(cacheCursor.getLong(10));
obs.setTimeZoneOffset(cacheCursor.getLong(11));
obs.setUser_id(cacheCursor.getString(12));
obs.setTrend(cacheCursor.getString(18));
// TODO: Add sensor information
cacheResults.add(obs);
}
try {
msg.replyTo.send(Message.obtain(null, MSG_API_RECENTS,
cacheResults));
} catch (RemoteException re) {
re.printStackTrace();
}
break;
case MSG_MAKE_API_CALL:
CbApi api = new CbApi(getApplicationContext());
CbApiCall liveApiCall = (CbApiCall) msg.obj;
long timeDiff = System.currentTimeMillis() - lastAPICall;
if(timeDiff > 1000 * 3) {
lastAPICall = api.makeAPICall(liveApiCall, service, msg.replyTo);
} else {
log("service asked for api call, time diff too short " + timeDiff);
}
break;
case MSG_CLEAR_LOCAL_CACHE:
db.open();
db.clearLocalCache();
db.close();
break;
case MSG_REMOVE_FROM_PRESSURENET:
// TODO: Implement
break;
case MSG_CLEAR_API_CACHE:
db.open();
db.clearAPICache();
db.close();
break;
case MSG_ADD_CURRENT_CONDITION:
CbCurrentCondition cc = (CbCurrentCondition) msg.obj;
db.open();
db.addCondition(cc);
db.close();
break;
case MSG_GET_CURRENT_CONDITIONS:
db.open();
CbApiCall currentConditionAPI = (CbApiCall) msg.obj;
Cursor ccCursor = db.getCurrentConditions(
currentConditionAPI.getMinLat(),
currentConditionAPI.getMaxLat(),
currentConditionAPI.getMinLon(),
currentConditionAPI.getMaxLon(),
currentConditionAPI.getStartTime(),
currentConditionAPI.getEndTime(),
1000);
ArrayList<CbCurrentCondition> conditions = new ArrayList<CbCurrentCondition>();
while(ccCursor.moveToNext()) {
CbCurrentCondition cur = new CbCurrentCondition();
Location location = new Location("network");
location.setLatitude(ccCursor.getDouble(1));
location.setLongitude(ccCursor.getDouble(2));
location.setAltitude(ccCursor.getDouble(3));
location.setAccuracy(ccCursor.getInt(4));
location.setProvider(ccCursor.getString(5));
cur.setLocation(location);
cur.setTime(ccCursor.getLong(6));
cur.setTime(ccCursor.getLong(7));
cur.setUser_id(ccCursor.getString(9));
cur.setGeneral_condition(ccCursor.getString(10));
cur.setWindy(ccCursor.getString(11));
cur.setFog_thickness(ccCursor.getString(12));
cur.setCloud_type(ccCursor.getString(13));
cur.setPrecipitation_type(ccCursor.getString(14));
cur.setPrecipitation_amount(ccCursor.getDouble(15));
cur.setPrecipitation_unit(ccCursor.getString(16));
cur.setThunderstorm_intensity(ccCursor.getString(17));
cur.setUser_comment(ccCursor.getString(18));
conditions.add(cur);
}
db.close();
try {
msg.replyTo.send(Message.obtain(null, MSG_CURRENT_CONDITIONS,
conditions));
} catch (RemoteException re) {
re.printStackTrace();
}
break;
case MSG_SEND_CURRENT_CONDITION:
CbCurrentCondition condition = (CbCurrentCondition) msg.obj;
sendCbCurrentCondition(condition);
break;
case MSG_SEND_OBSERVATION:
// TODO: Implement
break;
case MSG_MAKE_CURRENT_CONDITIONS_API_CALL:
CbApi conditionApi = new CbApi(getApplicationContext());
CbApiCall conditionApiCall = (CbApiCall) msg.obj;
conditionApiCall.setCallType("Conditions");
conditionApi.makeAPICall(conditionApiCall, service, msg.replyTo);
break;
default:
super.handleMessage(msg);
}
}
}
public boolean notifyAPIResult(Messenger reply, int count) {
try {
if (reply == null) {
System.out.println("cannot notify, reply is null");
} else {
reply.send(Message.obtain(null, MSG_API_RESULT_COUNT,
count, 0));
}
} catch (RemoteException re) {
re.printStackTrace();
} catch (NullPointerException npe) {
npe.printStackTrace();
}
return false;
}
public CbObservation recentPressureFromDatabase() {
CbObservation obs = new CbObservation();
long rowId = db.fetchObservationMaxID();
double pressure = 0.0;
Cursor c = db.fetchObservation(rowId);
while (c.moveToNext()) {
pressure = c.getDouble(8);
}
log(pressure + " pressure from db");
if (pressure == 0.0) {
log("returning null");
return null;
}
obs.setObservationValue(pressure);
return obs;
}
private class StreamObservation extends AsyncTask<Messenger, Void, String> {
@Override
protected String doInBackground(Messenger... m) {
try {
for (Messenger msgr : m) {
if (msgr != null) {
msgr.send(Message.obtain(null, MSG_DATA_STREAM,
recentPressureFromDatabase()));
} else {
log("messenger is null");
}
}
} catch (RemoteException re) {
re.printStackTrace();
}
return "--";
}
@Override
protected void onPostExecute(String result) {
}
@Override
protected void onPreExecute() {
}
@Override
protected void onProgressUpdate(Void... values) {
}
}
public void startDataStream(Messenger m) {
log("cbService starting stream " + (m == null));
dataCollector.startCollectingData(m);
new StreamObservation().execute(m);
}
public void stopDataStream() {
log("cbservice stopping stream");
dataCollector.stopCollectingData();
}
/**
* Get a hash'd device ID
*
* @return
*/
public String getID() {
try {
MessageDigest md = MessageDigest.getInstance("MD5");
String actual_id = Secure.getString(getApplicationContext()
.getContentResolver(), Secure.ANDROID_ID);
byte[] bytes = actual_id.getBytes();
byte[] digest = md.digest(bytes);
StringBuffer hexString = new StringBuffer();
for (int i = 0; i < digest.length; i++) {
hexString.append(Integer.toHexString(0xFF & digest[i]));
}
return hexString.toString();
} catch (Exception e) {
return "--";
}
}
// Used to write a log to SD card. Not used unless logging enabled.
public void setUpFiles() {
try {
File homeDirectory = getExternalFilesDir(null);
if (homeDirectory != null) {
mAppDir = homeDirectory.getAbsolutePath();
}
} catch (Exception e) {
e.printStackTrace();
}
}
// Log data to SD card for debug purposes.
// To enable logging, ensure the Manifest allows writing to SD card.
public void logToFile(String text) {
try {
OutputStream output = new FileOutputStream(mAppDir + "/log.txt",
true);
String logString = (new Date()).toString() + ": " + text + "\n";
output.write(logString.getBytes());
output.close();
} catch (FileNotFoundException e) {
e.printStackTrace();
} catch (IOException ioe) {
ioe.printStackTrace();
}
}
@Override
public IBinder onBind(Intent intent) {
log("on bind");
return mMessenger.getBinder();
}
@Override
public void onRebind(Intent intent) {
log("on rebind");
super.onRebind(intent);
}
public void log(String message) {
// logToFile(message);
System.out.println(message);
}
public CbDataCollector getDataCollector() {
return dataCollector;
}
public void setDataCollector(CbDataCollector dataCollector) {
this.dataCollector = dataCollector;
}
public CbLocationManager getLocationManager() {
return locationManager;
}
public void setLocationManager(CbLocationManager locationManager) {
this.locationManager = locationManager;
}
}
| true | true |
public void handleMessage(Message msg) {
switch (msg.what) {
case MSG_STOP:
log("message. bound service says stop");
stopAutoSubmit();
break;
case MSG_GET_BEST_LOCATION:
log("message. bound service requesting location");
if (locationManager != null) {
Location best = locationManager.getCurrentBestLocation();
try {
log("service sending best location");
msg.replyTo.send(Message.obtain(null,
MSG_BEST_LOCATION, best));
} catch (RemoteException re) {
re.printStackTrace();
}
} else {
log("error: location null, not returning");
}
break;
case MSG_GET_BEST_PRESSURE:
log("message. bound service requesting pressure");
if (dataCollector != null) {
CbObservation pressure = dataCollector
.getPressureObservation();
try {
log("service sending best pressure");
msg.replyTo.send(Message.obtain(null,
MSG_BEST_PRESSURE, pressure));
} catch (RemoteException re) {
re.printStackTrace();
}
} else {
log("error: data collector null, not returning");
}
break;
case MSG_START_AUTOSUBMIT:
log("start autosubmit");
startWithDatabase();
break;
case MSG_STOP_AUTOSUBMIT:
log("stop autosubmit");
stopAutoSubmit();
break;
case MSG_GET_SETTINGS:
log("get settings");
try {
msg.replyTo.send(Message.obtain(null, MSG_SETTINGS,
settingsHandler));
} catch (RemoteException re) {
re.printStackTrace();
}
break;
case MSG_START_DATA_STREAM:
startDataStream(msg.replyTo);
break;
case MSG_STOP_DATA_STREAM:
stopDataStream();
break;
case MSG_SET_SETTINGS:
log("set settings");
CbSettingsHandler newSettings = (CbSettingsHandler) msg.obj;
newSettings.saveSettings();
break;
case MSG_GET_LOCAL_RECENTS:
log("get recents");
CbApiCall apiCall = (CbApiCall) msg.obj;
System.out.println(apiCall);
if(apiCall == null) {
System.out.println("apicall null, bailing");
break;
}
// run API call
db.open();
Cursor cursor = db.runLocalAPICall(apiCall.getMinLat(),
apiCall.getMaxLat(), apiCall.getMinLon(),
apiCall.getMaxLon(), apiCall.getStartTime(),
apiCall.getEndTime(), 2000);
System.out.println("local api cursor count " + cursor.getCount());
ArrayList<CbObservation> results = new ArrayList<CbObservation>();
while (cursor.moveToNext()) {
// TODO: This is duplicated in CbDataCollector. Fix that
CbObservation obs = new CbObservation();
Location location = new Location("network");
location.setLatitude(cursor.getDouble(1));
location.setLongitude(cursor.getDouble(2));
location.setAltitude(cursor.getDouble(3));
location.setAccuracy(cursor.getInt(4));
location.setProvider(cursor.getString(5));
obs.setLocation(location);
obs.setObservationType(cursor.getString(6));
obs.setObservationUnit(cursor.getString(7));
obs.setObservationValue(cursor.getDouble(8));
obs.setSharing(cursor.getString(9));
obs.setTime(cursor.getInt(10));
obs.setTimeZoneOffset(cursor.getInt(11));
obs.setUser_id(cursor.getString(12));
obs.setTrend(cursor.getString(18));
// TODO: Add sensor information
results.add(obs);
}
db.close();
log("cbservice: " + results.size() + " local api results");
try {
msg.replyTo.send(Message.obtain(null, MSG_LOCAL_RECENTS,
results));
} catch (RemoteException re) {
re.printStackTrace();
}
break;
case MSG_GET_API_RECENTS:
log("get api recents");
CbApiCall apiCacheCall = (CbApiCall) msg.obj;
log(apiCacheCall.toString());
// run API call
db.open();
Cursor cacheCursor = db.runAPICacheCall(
apiCacheCall.getMinLat(), apiCacheCall.getMaxLat(),
apiCacheCall.getMinLon(), apiCacheCall.getMaxLon(),
apiCacheCall.getStartTime(), apiCacheCall.getEndTime(),
2000);
ArrayList<CbObservation> cacheResults = new ArrayList<CbObservation>();
System.out.println("cache cursor count " + cacheCursor.getCount());
while (cacheCursor.moveToNext()) {
// TODO: This is duplicated in CbDataCollector. Fix that
CbObservation obs = new CbObservation();
Location location = new Location("network");
location.setLatitude(cacheCursor.getDouble(1));
location.setLongitude(cacheCursor.getDouble(2));
location.setAltitude(cacheCursor.getDouble(3));
location.setAccuracy(cacheCursor.getInt(4));
location.setProvider(cacheCursor.getString(5));
obs.setLocation(location);
obs.setObservationType(cacheCursor.getString(6));
obs.setObservationUnit(cacheCursor.getString(7));
obs.setObservationValue(cacheCursor.getDouble(8));
obs.setSharing(cacheCursor.getString(9));
obs.setTime(cacheCursor.getLong(10));
obs.setTimeZoneOffset(cacheCursor.getLong(11));
obs.setUser_id(cacheCursor.getString(12));
obs.setTrend(cacheCursor.getString(18));
// TODO: Add sensor information
cacheResults.add(obs);
}
try {
msg.replyTo.send(Message.obtain(null, MSG_API_RECENTS,
cacheResults));
} catch (RemoteException re) {
re.printStackTrace();
}
break;
case MSG_MAKE_API_CALL:
CbApi api = new CbApi(getApplicationContext());
CbApiCall liveApiCall = (CbApiCall) msg.obj;
long timeDiff = System.currentTimeMillis() - lastAPICall;
if(timeDiff > 1000 * 3) {
lastAPICall = api.makeAPICall(liveApiCall, service, msg.replyTo);
} else {
log("service asked for api call, time diff too short " + timeDiff);
}
break;
case MSG_CLEAR_LOCAL_CACHE:
db.open();
db.clearLocalCache();
db.close();
break;
case MSG_REMOVE_FROM_PRESSURENET:
// TODO: Implement
break;
case MSG_CLEAR_API_CACHE:
db.open();
db.clearAPICache();
db.close();
break;
case MSG_ADD_CURRENT_CONDITION:
CbCurrentCondition cc = (CbCurrentCondition) msg.obj;
db.open();
db.addCondition(cc);
db.close();
break;
case MSG_GET_CURRENT_CONDITIONS:
db.open();
CbApiCall currentConditionAPI = (CbApiCall) msg.obj;
Cursor ccCursor = db.getCurrentConditions(
currentConditionAPI.getMinLat(),
currentConditionAPI.getMaxLat(),
currentConditionAPI.getMinLon(),
currentConditionAPI.getMaxLon(),
currentConditionAPI.getStartTime(),
currentConditionAPI.getEndTime(),
1000);
ArrayList<CbCurrentCondition> conditions = new ArrayList<CbCurrentCondition>();
while(ccCursor.moveToNext()) {
CbCurrentCondition cur = new CbCurrentCondition();
Location location = new Location("network");
location.setLatitude(ccCursor.getDouble(1));
location.setLongitude(ccCursor.getDouble(2));
location.setAltitude(ccCursor.getDouble(3));
location.setAccuracy(ccCursor.getInt(4));
location.setProvider(ccCursor.getString(5));
cur.setLocation(location);
cur.setTime(ccCursor.getLong(6));
cur.setTime(ccCursor.getLong(7));
cur.setUser_id(ccCursor.getString(9));
cur.setGeneral_condition(ccCursor.getString(10));
cur.setWindy(ccCursor.getString(11));
cur.setFog_thickness(ccCursor.getString(12));
cur.setCloud_type(ccCursor.getString(13));
cur.setPrecipitation_type(ccCursor.getString(14));
cur.setPrecipitation_amount(ccCursor.getDouble(15));
cur.setPrecipitation_unit(ccCursor.getString(16));
cur.setThunderstorm_intensity(ccCursor.getString(17));
cur.setUser_comment(ccCursor.getString(18));
conditions.add(cur);
}
db.close();
try {
msg.replyTo.send(Message.obtain(null, MSG_CURRENT_CONDITIONS,
conditions));
} catch (RemoteException re) {
re.printStackTrace();
}
break;
case MSG_SEND_CURRENT_CONDITION:
CbCurrentCondition condition = (CbCurrentCondition) msg.obj;
sendCbCurrentCondition(condition);
break;
case MSG_SEND_OBSERVATION:
// TODO: Implement
break;
case MSG_MAKE_CURRENT_CONDITIONS_API_CALL:
CbApi conditionApi = new CbApi(getApplicationContext());
CbApiCall conditionApiCall = (CbApiCall) msg.obj;
conditionApiCall.setCallType("Conditions");
conditionApi.makeAPICall(conditionApiCall, service, msg.replyTo);
break;
default:
super.handleMessage(msg);
}
}
|
public void handleMessage(Message msg) {
switch (msg.what) {
case MSG_STOP:
log("message. bound service says stop");
stopAutoSubmit();
break;
case MSG_GET_BEST_LOCATION:
log("message. bound service requesting location");
if (locationManager != null) {
Location best = locationManager.getCurrentBestLocation();
try {
log("service sending best location");
msg.replyTo.send(Message.obtain(null,
MSG_BEST_LOCATION, best));
} catch (RemoteException re) {
re.printStackTrace();
}
} else {
log("error: location null, not returning");
}
break;
case MSG_GET_BEST_PRESSURE:
log("message. bound service requesting pressure");
if (dataCollector != null) {
CbObservation pressure = dataCollector
.getPressureObservation();
try {
log("service sending best pressure");
msg.replyTo.send(Message.obtain(null,
MSG_BEST_PRESSURE, pressure));
} catch (RemoteException re) {
re.printStackTrace();
}
} else {
log("error: data collector null, not returning");
}
break;
case MSG_START_AUTOSUBMIT:
log("start autosubmit");
startWithDatabase();
break;
case MSG_STOP_AUTOSUBMIT:
log("stop autosubmit");
stopAutoSubmit();
break;
case MSG_GET_SETTINGS:
log("get settings");
try {
msg.replyTo.send(Message.obtain(null, MSG_SETTINGS,
settingsHandler));
} catch (RemoteException re) {
re.printStackTrace();
}
break;
case MSG_START_DATA_STREAM:
startDataStream(msg.replyTo);
break;
case MSG_STOP_DATA_STREAM:
stopDataStream();
break;
case MSG_SET_SETTINGS:
log("set settings");
CbSettingsHandler newSettings = (CbSettingsHandler) msg.obj;
newSettings.saveSettings();
break;
case MSG_GET_LOCAL_RECENTS:
log("get recents");
CbApiCall apiCall = (CbApiCall) msg.obj;
System.out.println(apiCall);
if(apiCall == null) {
System.out.println("apicall null, bailing");
break;
}
// run API call
db.open();
Cursor cursor = db.runLocalAPICall(apiCall.getMinLat(),
apiCall.getMaxLat(), apiCall.getMinLon(),
apiCall.getMaxLon(), apiCall.getStartTime(),
apiCall.getEndTime(), 2000);
System.out.println("local api cursor count " + cursor.getCount());
ArrayList<CbObservation> results = new ArrayList<CbObservation>();
while (cursor.moveToNext()) {
// TODO: This is duplicated in CbDataCollector. Fix that
CbObservation obs = new CbObservation();
Location location = new Location("network");
location.setLatitude(cursor.getDouble(1));
location.setLongitude(cursor.getDouble(2));
location.setAltitude(cursor.getDouble(3));
location.setAccuracy(cursor.getInt(4));
location.setProvider(cursor.getString(5));
obs.setLocation(location);
obs.setObservationType(cursor.getString(6));
obs.setObservationUnit(cursor.getString(7));
obs.setObservationValue(cursor.getDouble(8));
obs.setSharing(cursor.getString(9));
obs.setTime(cursor.getLong(10));
obs.setTimeZoneOffset(cursor.getInt(11));
obs.setUser_id(cursor.getString(12));
obs.setTrend(cursor.getString(18));
// TODO: Add sensor information
results.add(obs);
}
db.close();
log("cbservice: " + results.size() + " local api results");
try {
msg.replyTo.send(Message.obtain(null, MSG_LOCAL_RECENTS,
results));
} catch (RemoteException re) {
re.printStackTrace();
}
break;
case MSG_GET_API_RECENTS:
log("get api recents");
CbApiCall apiCacheCall = (CbApiCall) msg.obj;
log(apiCacheCall.toString());
// run API call
db.open();
Cursor cacheCursor = db.runAPICacheCall(
apiCacheCall.getMinLat(), apiCacheCall.getMaxLat(),
apiCacheCall.getMinLon(), apiCacheCall.getMaxLon(),
apiCacheCall.getStartTime(), apiCacheCall.getEndTime(),
2000);
ArrayList<CbObservation> cacheResults = new ArrayList<CbObservation>();
System.out.println("cache cursor count " + cacheCursor.getCount());
while (cacheCursor.moveToNext()) {
// TODO: This is duplicated in CbDataCollector. Fix that
CbObservation obs = new CbObservation();
Location location = new Location("network");
location.setLatitude(cacheCursor.getDouble(1));
location.setLongitude(cacheCursor.getDouble(2));
location.setAltitude(cacheCursor.getDouble(3));
location.setAccuracy(cacheCursor.getInt(4));
location.setProvider(cacheCursor.getString(5));
obs.setLocation(location);
obs.setObservationType(cacheCursor.getString(6));
obs.setObservationUnit(cacheCursor.getString(7));
obs.setObservationValue(cacheCursor.getDouble(8));
obs.setSharing(cacheCursor.getString(9));
obs.setTime(cacheCursor.getLong(10));
obs.setTimeZoneOffset(cacheCursor.getLong(11));
obs.setUser_id(cacheCursor.getString(12));
obs.setTrend(cacheCursor.getString(18));
// TODO: Add sensor information
cacheResults.add(obs);
}
try {
msg.replyTo.send(Message.obtain(null, MSG_API_RECENTS,
cacheResults));
} catch (RemoteException re) {
re.printStackTrace();
}
break;
case MSG_MAKE_API_CALL:
CbApi api = new CbApi(getApplicationContext());
CbApiCall liveApiCall = (CbApiCall) msg.obj;
long timeDiff = System.currentTimeMillis() - lastAPICall;
if(timeDiff > 1000 * 3) {
lastAPICall = api.makeAPICall(liveApiCall, service, msg.replyTo);
} else {
log("service asked for api call, time diff too short " + timeDiff);
}
break;
case MSG_CLEAR_LOCAL_CACHE:
db.open();
db.clearLocalCache();
db.close();
break;
case MSG_REMOVE_FROM_PRESSURENET:
// TODO: Implement
break;
case MSG_CLEAR_API_CACHE:
db.open();
db.clearAPICache();
db.close();
break;
case MSG_ADD_CURRENT_CONDITION:
CbCurrentCondition cc = (CbCurrentCondition) msg.obj;
db.open();
db.addCondition(cc);
db.close();
break;
case MSG_GET_CURRENT_CONDITIONS:
db.open();
CbApiCall currentConditionAPI = (CbApiCall) msg.obj;
Cursor ccCursor = db.getCurrentConditions(
currentConditionAPI.getMinLat(),
currentConditionAPI.getMaxLat(),
currentConditionAPI.getMinLon(),
currentConditionAPI.getMaxLon(),
currentConditionAPI.getStartTime(),
currentConditionAPI.getEndTime(),
1000);
ArrayList<CbCurrentCondition> conditions = new ArrayList<CbCurrentCondition>();
while(ccCursor.moveToNext()) {
CbCurrentCondition cur = new CbCurrentCondition();
Location location = new Location("network");
location.setLatitude(ccCursor.getDouble(1));
location.setLongitude(ccCursor.getDouble(2));
location.setAltitude(ccCursor.getDouble(3));
location.setAccuracy(ccCursor.getInt(4));
location.setProvider(ccCursor.getString(5));
cur.setLocation(location);
cur.setTime(ccCursor.getLong(6));
cur.setTime(ccCursor.getLong(7));
cur.setUser_id(ccCursor.getString(9));
cur.setGeneral_condition(ccCursor.getString(10));
cur.setWindy(ccCursor.getString(11));
cur.setFog_thickness(ccCursor.getString(12));
cur.setCloud_type(ccCursor.getString(13));
cur.setPrecipitation_type(ccCursor.getString(14));
cur.setPrecipitation_amount(ccCursor.getDouble(15));
cur.setPrecipitation_unit(ccCursor.getString(16));
cur.setThunderstorm_intensity(ccCursor.getString(17));
cur.setUser_comment(ccCursor.getString(18));
conditions.add(cur);
}
db.close();
try {
msg.replyTo.send(Message.obtain(null, MSG_CURRENT_CONDITIONS,
conditions));
} catch (RemoteException re) {
re.printStackTrace();
}
break;
case MSG_SEND_CURRENT_CONDITION:
CbCurrentCondition condition = (CbCurrentCondition) msg.obj;
sendCbCurrentCondition(condition);
break;
case MSG_SEND_OBSERVATION:
// TODO: Implement
break;
case MSG_MAKE_CURRENT_CONDITIONS_API_CALL:
CbApi conditionApi = new CbApi(getApplicationContext());
CbApiCall conditionApiCall = (CbApiCall) msg.obj;
conditionApiCall.setCallType("Conditions");
conditionApi.makeAPICall(conditionApiCall, service, msg.replyTo);
break;
default:
super.handleMessage(msg);
}
}
|
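The only change in this second pair is reading the observation time with cursor.getLong(10) instead of cursor.getInt(10). A Unix timestamp in milliseconds no longer fits in a 32-bit int (Integer.MAX_VALUE milliseconds is only about 24.9 days past the epoch), so an int-width read wraps the value. The following minimal sketch demonstrates that overflow on its own, independent of Android's Cursor API:

```java
/** Standalone sketch of why a millisecond timestamp must be read as long. */
public class TimestampOverflowDemo {
    public static void main(String[] args) {
        long nowMillis = System.currentTimeMillis(); // ~1.7e12, far above 2^31 - 1
        int narrowed = (int) nowMillis;              // what an int-width read effectively does
        System.out.println("as long: " + nowMillis);
        System.out.println("as int:  " + narrowed);  // wrapped, typically negative or nonsense
        System.out.println("int max: " + Integer.MAX_VALUE + " ms, about 24.9 days after epoch");
    }
}
```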
diff --git a/db/src/main/java/com/psddev/dari/db/SqlDatabase.java b/db/src/main/java/com/psddev/dari/db/SqlDatabase.java
index 745dce31..a7e6c23b 100644
--- a/db/src/main/java/com/psddev/dari/db/SqlDatabase.java
+++ b/db/src/main/java/com/psddev/dari/db/SqlDatabase.java
@@ -1,2356 +1,2355 @@
package com.psddev.dari.db;
import com.jolbox.bonecp.BoneCPDataSource;
import com.psddev.dari.util.ObjectUtils;
import com.psddev.dari.util.PaginatedResult;
import com.psddev.dari.util.PeriodicValue;
import com.psddev.dari.util.Profiler;
import com.psddev.dari.util.PullThroughValue;
import com.psddev.dari.util.Settings;
import com.psddev.dari.util.SettingsException;
import com.psddev.dari.util.Stats;
import com.psddev.dari.util.StringUtils;
import com.psddev.dari.util.TypeDefinition;
import java.io.ByteArrayInputStream;
import java.lang.annotation.Documented;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
import java.lang.ref.WeakReference;
import java.sql.BatchUpdateException;
import java.sql.Connection;
import java.sql.DatabaseMetaData;
import java.sql.Driver;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.sql.SQLIntegrityConstraintViolationException;
import java.sql.SQLTimeoutException;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Date;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.NoSuchElementException;
import java.util.Set;
import java.util.UUID;
import java.util.concurrent.ConcurrentHashMap;
import javax.sql.DataSource;
import org.iq80.snappy.Snappy;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/** Database backed by a SQL engine. */
public class SqlDatabase extends AbstractDatabase<Connection> {
public static final String DATA_SOURCE_SETTING = "dataSource";
public static final String JDBC_DRIVER_CLASS_SETTING = "jdbcDriverClass";
public static final String JDBC_URL_SETTING = "jdbcUrl";
public static final String JDBC_USER_SETTING = "jdbcUser";
public static final String JDBC_PASSWORD_SETTING = "jdbcPassword";
public static final String JDBC_POOL_SIZE_SETTING = "jdbcPoolSize";
public static final String READ_DATA_SOURCE_SETTING = "readDataSource";
public static final String READ_JDBC_DRIVER_CLASS_SETTING = "readJdbcDriverClass";
public static final String READ_JDBC_URL_SETTING = "readJdbcUrl";
public static final String READ_JDBC_USER_SETTING = "readJdbcUser";
public static final String READ_JDBC_PASSWORD_SETTING = "readJdbcPassword";
public static final String READ_JDBC_POOL_SIZE_SETTING = "readJdbcPoolSize";
public static final String VENDOR_CLASS_SETTING = "vendorClass";
public static final String COMPRESS_DATA_SUB_SETTING = "compressData";
public static final String RECORD_TABLE = "Record";
public static final String RECORD_UPDATE_TABLE = "RecordUpdate";
public static final String SYMBOL_TABLE = "Symbol";
public static final String ID_COLUMN = "id";
public static final String TYPE_ID_COLUMN = "typeId";
public static final String IN_ROW_INDEX_COLUMN = "inRowIndex";
public static final String DATA_COLUMN = "data";
public static final String SYMBOL_ID_COLUMN = "symbolId";
public static final String UPDATE_DATE_COLUMN = "updateDate";
public static final String VALUE_COLUMN = "value";
public static final String CONNECTION_QUERY_OPTION = "sql.connection";
public static final String EXTRA_COLUMNS_QUERY_OPTION = "sql.extraColumns";
public static final String EXTRA_JOINS_QUERY_OPTION = "sql.extraJoins";
public static final String EXTRA_WHERE_QUERY_OPTION = "sql.extraWhere";
public static final String EXTRA_HAVING_QUERY_OPTION = "sql.extraHaving";
public static final String MYSQL_INDEX_HINT_QUERY_OPTION = "sql.mysqlIndexHint";
public static final String RETURN_ORIGINAL_DATA_QUERY_OPTION = "sql.returnOriginalData";
public static final String USE_JDBC_FETCH_SIZE_QUERY_OPTION = "sql.useJdbcFetchSize";
public static final String USE_READ_DATA_SOURCE_QUERY_OPTION = "sql.useReadDataSource";
public static final String SKIP_INDEX_STATE_EXTRA = "sql.skipIndex";
public static final String INDEX_TABLE_INDEX_OPTION = "sql.indexTable";
public static final String EXTRA_COLUMN_EXTRA_PREFIX = "sql.extraColumn.";
public static final String ORIGINAL_DATA_EXTRA = "sql.originalData";
private static final Logger LOGGER = LoggerFactory.getLogger(SqlDatabase.class);
private static final String SHORT_NAME = "SQL";
private static final Stats STATS = new Stats(SHORT_NAME);
private static final String QUERY_STATS_OPERATION = "Query";
private static final String UPDATE_STATS_OPERATION = "Update";
private static final String QUERY_PROFILER_EVENT = SHORT_NAME + " " + QUERY_STATS_OPERATION;
private static final String UPDATE_PROFILER_EVENT = SHORT_NAME + " " + UPDATE_STATS_OPERATION;
private final static List<SqlDatabase> INSTANCES = new ArrayList<SqlDatabase>();
{
INSTANCES.add(this);
}
private volatile DataSource dataSource;
private volatile DataSource readDataSource;
private volatile SqlVendor vendor;
private volatile boolean compressData;
/**
* Quotes the given {@code identifier} so that it's safe to use
* in a SQL query.
*/
public static String quoteIdentifier(String identifier) {
return "\"" + StringUtils.replaceAll(identifier, "\\\\", "\\\\\\\\", "\"", "\"\"") + "\"";
}
/**
* Quotes the given {@code value} so that it's safe to use
* in a SQL query.
*/
public static String quoteValue(Object value) {
if (value == null) {
return "NULL";
} else if (value instanceof Number) {
return value.toString();
} else if (value instanceof byte[]) {
return "X'" + StringUtils.hex((byte[]) value) + "'";
} else {
return "'" + value.toString().replace("'", "''").replace("\\", "\\\\") + "'";
}
}
/** Closes all resources used by all instances. */
public static void closeAll() {
for (SqlDatabase database : INSTANCES) {
database.close();
}
INSTANCES.clear();
}
/**
* Creates an {@link SqlDatabaseException} that occurred during
* an execution of a query.
*/
private SqlDatabaseException createQueryException(
SQLException error,
String sqlQuery,
Query<?> query) {
String message = error.getMessage();
if (error instanceof SQLTimeoutException || message.contains("timeout")) {
return new SqlDatabaseException.ReadTimeout(this, error, sqlQuery, query);
} else {
return new SqlDatabaseException(this, error, sqlQuery, query);
}
}
/** Returns the JDBC data source used for general database operations. */
public DataSource getDataSource() {
return dataSource;
}
private static final Map<String, Class<? extends SqlVendor>> VENDOR_CLASSES; static {
Map<String, Class<? extends SqlVendor>> m = new HashMap<String, Class<? extends SqlVendor>>();
m.put("H2", SqlVendor.H2.class);
m.put("MySQL", SqlVendor.MySQL.class);
m.put("PostgreSQL", SqlVendor.PostgreSQL.class);
m.put("Oracle", SqlVendor.Oracle.class);
VENDOR_CLASSES = m;
}
/** Sets the JDBC data source used for general database operations. */
public void setDataSource(DataSource dataSource) {
this.dataSource = dataSource;
if (dataSource == null) {
return;
}
synchronized (this) {
try {
boolean writable = false;
if (vendor == null) {
Connection connection;
try {
connection = openConnection();
writable = true;
} catch (DatabaseException error) {
LOGGER.debug("Can't read vendor information from the writable server!", error);
connection = openReadConnection();
}
try {
DatabaseMetaData meta = connection.getMetaData();
String vendorName = meta.getDatabaseProductName();
Class<? extends SqlVendor> vendorClass = VENDOR_CLASSES.get(vendorName);
LOGGER.info(
"Initializing SQL vendor for [{}]: [{}] -> [{}]",
new Object[] { getName(), vendorName, vendorClass });
vendor = vendorClass != null ? TypeDefinition.getInstance(vendorClass).newInstance() : new SqlVendor();
vendor.setDatabase(this);
} finally {
closeConnection(connection);
}
}
tableNames.refresh();
symbols.invalidate();
if (writable) {
vendor.createRecord(this);
vendor.createRecordUpdate(this);
vendor.createSymbol(this);
for (SqlIndex index : SqlIndex.values()) {
if (index != SqlIndex.CUSTOM) {
vendor.createRecordIndex(
this,
index.getReadTable(this, null).getName(this, null),
index);
}
}
tableNames.refresh();
symbols.invalidate();
}
} catch (SQLException ex) {
throw new SqlDatabaseException(this, "Can't check for required tables!", ex);
}
}
}
/** Returns the JDBC data source used exclusively for read operations. */
public DataSource getReadDataSource() {
return this.readDataSource;
}
/** Sets the JDBC data source used exclusively for read operations. */
public void setReadDataSource(DataSource readDataSource) {
this.readDataSource = readDataSource;
}
/** Returns the vendor-specific SQL engine information. */
public SqlVendor getVendor() {
return vendor;
}
/** Sets the vendor-specific SQL engine information. */
public void setVendor(SqlVendor vendor) {
this.vendor = vendor;
}
/** Returns {@code true} if the data should be compressed. */
public boolean isCompressData() {
return compressData;
}
/** Sets whether the data should be compressed. */
public void setCompressData(boolean compressData) {
this.compressData = compressData;
}
/**
* Returns {@code true} if the {@link #RECORD_TABLE} in this database
* has the {@link #IN_ROW_INDEX_COLUMN}.
*/
public boolean hasInRowIndex() {
return hasInRowIndex;
}
/**
* Returns {@code true} if all comparisons executed in this database
* should ignore case by default.
*/
public boolean comparesIgnoreCase() {
return comparesIgnoreCase;
}
/**
* Returns {@code true} if this database contains a table with
* the given {@code name}.
*/
public boolean hasTable(String name) {
if (name == null) {
return false;
} else {
Set<String> names = tableNames.get();
return names != null && names.contains(name.toLowerCase());
}
}
private transient volatile boolean hasInRowIndex;
private transient volatile boolean comparesIgnoreCase;
private final transient PeriodicValue<Set<String>> tableNames = new PeriodicValue<Set<String>>(0.0, 60.0) {
@Override
protected Set<String> update() {
if (getDataSource() == null) {
return Collections.emptySet();
}
Connection connection;
try {
connection = openConnection();
} catch (DatabaseException error) {
LOGGER.debug("Can't read table names from the writable server!", error);
connection = openReadConnection();
}
try {
SqlVendor vendor = getVendor();
String recordTable = null;
int maxStringVersion = 0;
Set<String> loweredNames = new HashSet<String>();
for (String name : vendor.getTables(connection)) {
String loweredName = name.toLowerCase();
loweredNames.add(loweredName);
if ("record".equals(loweredName)) {
recordTable = name;
} else if (loweredName.startsWith("recordstring")) {
int version = ObjectUtils.to(int.class, loweredName.substring(12));
if (version > maxStringVersion) {
maxStringVersion = version;
}
}
}
if (recordTable != null) {
hasInRowIndex = vendor.hasInRowIndex(connection, recordTable);
}
comparesIgnoreCase = maxStringVersion >= 3;
return loweredNames;
} catch (SQLException error) {
LOGGER.error("Can't query table names!", error);
return get();
} finally {
closeConnection(connection);
}
}
};
/**
* Returns an unique numeric ID for the given {@code symbol}.
*/
public int getSymbolId(String symbol) {
Integer id = symbols.get().get(symbol);
if (id == null) {
SqlVendor vendor = getVendor();
Connection connection = openConnection();
try {
List<Object> parameters = new ArrayList<Object>();
StringBuilder insertBuilder = new StringBuilder();
insertBuilder.append("INSERT /*! IGNORE */ INTO ");
vendor.appendIdentifier(insertBuilder, SYMBOL_TABLE);
insertBuilder.append(" (");
vendor.appendIdentifier(insertBuilder, VALUE_COLUMN);
insertBuilder.append(") VALUES (");
vendor.appendBindValue(insertBuilder, symbol, parameters);
insertBuilder.append(")");
String insertSql = insertBuilder.toString();
try {
Static.executeUpdateWithList(connection, insertSql, parameters);
} catch (SQLException ex) {
if (!Static.isIntegrityConstraintViolation(ex)) {
throw createQueryException(ex, insertSql, null);
}
}
StringBuilder selectBuilder = new StringBuilder();
selectBuilder.append("SELECT ");
vendor.appendIdentifier(selectBuilder, SYMBOL_ID_COLUMN);
selectBuilder.append(" FROM ");
vendor.appendIdentifier(selectBuilder, SYMBOL_TABLE);
selectBuilder.append(" WHERE ");
vendor.appendIdentifier(selectBuilder, VALUE_COLUMN);
selectBuilder.append("=");
vendor.appendValue(selectBuilder, symbol);
String selectSql = selectBuilder.toString();
Statement statement = null;
ResultSet result = null;
try {
statement = connection.createStatement();
result = statement.executeQuery(selectSql);
result.next();
id = result.getInt(1);
symbols.get().put(symbol, id);
} catch (SQLException ex) {
throw createQueryException(ex, selectSql, null);
} finally {
closeResources(null, null, statement, result);
}
} finally {
closeConnection(connection);
}
}
return id;
}
// Cache of all internal symbols.
private transient PullThroughValue<Map<String, Integer>> symbols = new PullThroughValue<Map<String, Integer>>() {
@Override
protected Map<String, Integer> produce() {
SqlVendor vendor = getVendor();
StringBuilder selectBuilder = new StringBuilder();
selectBuilder.append("SELECT ");
vendor.appendIdentifier(selectBuilder, SYMBOL_ID_COLUMN);
selectBuilder.append(",");
vendor.appendIdentifier(selectBuilder, VALUE_COLUMN);
selectBuilder.append(" FROM ");
vendor.appendIdentifier(selectBuilder, SYMBOL_TABLE);
String selectSql = selectBuilder.toString();
Connection connection;
Statement statement = null;
ResultSet result = null;
try {
connection = openConnection();
} catch (DatabaseException error) {
LOGGER.debug("Can't read symbols from the writable server!", error);
connection = openReadConnection();
}
try {
statement = connection.createStatement();
result = statement.executeQuery(selectSql);
Map<String, Integer> symbols = new ConcurrentHashMap<String, Integer>();
while (result.next()) {
symbols.put(new String(result.getBytes(2), StringUtils.UTF_8), result.getInt(1));
}
return symbols;
} catch (SQLException ex) {
throw createQueryException(ex, selectSql, null);
} finally {
closeResources(null, connection, statement, result);
}
}
};
/**
* Returns the underlying JDBC connection.
*
* @deprecated Use {@link #openConnection} instead.
*/
@Deprecated
public Connection getConnection() {
return openConnection();
}
/** Closes any resources used by this database. */
public void close() {
DataSource dataSource = getDataSource();
if (dataSource instanceof BoneCPDataSource) {
LOGGER.info("Closing BoneCP data source in {}", getName());
((BoneCPDataSource) dataSource).close();
}
DataSource readDataSource = getReadDataSource();
if (readDataSource instanceof BoneCPDataSource) {
LOGGER.info("Closing BoneCP read data source in {}", getName());
((BoneCPDataSource) readDataSource).close();
}
setDataSource(null);
setReadDataSource(null);
}
/**
* Builds an SQL statement that can be used to get a count of all
* objects matching the given {@code query}.
*/
public String buildCountStatement(Query<?> query) {
return new SqlQuery(this, query).countStatement();
}
/**
* Builds an SQL statement that can be used to delete all rows
* matching the given {@code query}.
*/
public String buildDeleteStatement(Query<?> query) {
return new SqlQuery(this, query).deleteStatement();
}
/**
* Builds an SQL statement that can be used to get all objects
* grouped by the values of the given {@code groupFields}.
*/
public String buildGroupStatement(Query<?> query, String... groupFields) {
return new SqlQuery(this, query).groupStatement(groupFields);
}
/**
* Builds an SQL statement that can be used to get when the objects
* matching the given {@code query} were last updated.
*/
public String buildLastUpdateStatement(Query<?> query) {
return new SqlQuery(this, query).lastUpdateStatement();
}
/**
* Builds an SQL statement that can be used to list all rows
* matching the given {@code query}.
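*
* <p>A minimal sketch of how the build/select methods fit together
* (assumes {@code database} is a configured instance and {@code Article}
* is a hypothetical record type):
* <pre>{@code
* Query<Article> query = Query.from(Article.class);
* String sql = database.buildSelectStatement(query);
* List<Article> articles = database.selectListWithOptions(sql, query);
* }</pre>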
*/
public String buildSelectStatement(Query<?> query) {
return new SqlQuery(this, query).selectStatement();
}
/** Closes all the given SQL resources safely. */
private void closeResources(Query<?> query, Connection connection, Statement statement, ResultSet result) {
if (result != null) {
try {
result.close();
} catch (SQLException ex) {
}
}
if (statement != null) {
try {
statement.close();
} catch (SQLException ex) {
}
}
if (connection != null &&
(query == null ||
!connection.equals(query.getOptions().get(CONNECTION_QUERY_OPTION)))) {
try {
connection.close();
} catch (SQLException ex) {
}
}
}
private byte[] serializeState(State state) {
Map<String, Object> values = state.getSimpleValues();
for (Iterator<Map.Entry<String, Object>> i = values.entrySet().iterator(); i.hasNext(); ) {
Map.Entry<String, Object> entry = i.next();
ObjectField field = state.getField(entry.getKey());
if (field != null) {
if (field.as(FieldData.class).isIndexTableSourceFromAnywhere()) {
i.remove();
}
}
}
byte[] dataBytes = ObjectUtils.toJson(values).getBytes(StringUtils.UTF_8);
if (isCompressData()) {
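// Prefix the Snappy-compressed payload with 's' so that
// unserializeData can tell it apart from raw JSON, which always
// starts with '{'.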
byte[] compressed = new byte[Snappy.maxCompressedLength(dataBytes.length)];
int compressedLength = Snappy.compress(dataBytes, 0, dataBytes.length, compressed, 0);
dataBytes = new byte[compressedLength + 1];
dataBytes[0] = 's';
System.arraycopy(compressed, 0, dataBytes, 1, compressedLength);
}
return dataBytes;
}
@SuppressWarnings("unchecked")
private Map<String, Object> unserializeData(byte[] dataBytes) {
char format = '\0';
while (true) {
format = (char) dataBytes[0];
if (format == 's') {
dataBytes = Snappy.uncompress(dataBytes, 1, dataBytes.length - 1);
} else if (format == '{') {
return (Map<String, Object>) ObjectUtils.fromJson(new String(dataBytes, StringUtils.UTF_8));
} else {
break;
}
}
throw new IllegalStateException(String.format(
"Unknown format! ([%s])", format));
}
/**
* Creates a previously saved object using the given {@code resultSet}.
*/
private <T> T createSavedObjectWithResultSet(ResultSet resultSet, Query<T> query) throws SQLException {
T object = createSavedObject(resultSet.getObject(2), resultSet.getObject(1), query);
State objectState = State.getInstance(object);
if (!objectState.isReferenceOnly()) {
byte[] data = resultSet.getBytes(3);
if (data != null) {
objectState.putAll(unserializeData(data));
Boolean returnOriginal = ObjectUtils.to(Boolean.class, query.getOptions().get(RETURN_ORIGINAL_DATA_QUERY_OPTION));
if (returnOriginal == null) {
returnOriginal = Boolean.FALSE;
}
if (returnOriginal) {
objectState.getExtras().put(ORIGINAL_DATA_EXTRA, data);
}
}
}
ResultSetMetaData meta = resultSet.getMetaData();
for (int i = 4, count = meta.getColumnCount(); i <= count; ++ i) {
String columnName = meta.getColumnLabel(i);
if (query.getExtraSourceColumns().contains(columnName)) {
objectState.put(columnName, resultSet.getObject(i));
} else {
objectState.getExtras().put(EXTRA_COLUMN_EXTRA_PREFIX + meta.getColumnLabel(i), resultSet.getObject(i));
}
}
// Load extra columns from source index tables.
@SuppressWarnings("unchecked")
Set<UUID> unresolvedTypeIds = (Set<UUID>) query.getOptions().get(State.UNRESOLVED_TYPE_IDS_QUERY_OPTION);
Set<ObjectType> queryTypes = query.getConcreteTypes(getEnvironment());
ObjectType type = objectState.getType();
HashSet<ObjectField> loadExtraFields = new HashSet<ObjectField>();
if (type != null &&
(unresolvedTypeIds == null || !unresolvedTypeIds.contains(type.getId())) &&
!queryTypes.contains(type)) {
for (ObjectField field : type.getFields()) {
SqlDatabase.FieldData fieldData = field.as(SqlDatabase.FieldData.class);
if (fieldData.isIndexTableSource()) {
loadExtraFields.add(field);
}
}
}
if (!loadExtraFields.isEmpty()) {
Connection connection = openQueryConnection(query);
try {
for (ObjectField field : loadExtraFields) {
Statement extraStatement = null;
ResultSet extraResult = null;
try {
extraStatement = connection.createStatement();
extraResult = executeQueryBeforeTimeout(
extraStatement,
extraSourceSelectStatementById(field, objectState.getId()),
getQueryReadTimeout(query));
if (extraResult.next()) {
meta = extraResult.getMetaData();
for (int i = 1, count = meta.getColumnCount(); i <= count; ++ i) {
objectState.put(meta.getColumnLabel(i), extraResult.getObject(i));
}
}
} finally {
closeResources(null, null, extraStatement, extraResult);
}
}
} finally {
closeResources(query, connection, null, null);
}
}
return swapObjectType(query, object);
}
// Creates an SQL statement to return a single row from a FieldIndexTable
// used as a source table.
//
// TODO, maybe: move this to SqlQuery and use initializeClauses() and
// needsRecordTable=false instead of passing id to this method. Needs
// countperformance branch to do this.
private String extraSourceSelectStatementById(ObjectField field, UUID id) {
FieldData fieldData = field.as(FieldData.class);
ObjectType parentType = field.getParentType();
StringBuilder keyName = new StringBuilder(parentType.getInternalName());
keyName.append("/");
keyName.append(field.getInternalName());
Query<?> query = Query.fromType(parentType);
Query.MappedKey key = query.mapEmbeddedKey(getEnvironment(), keyName.toString());
ObjectIndex useIndex = null;
for (ObjectIndex index : key.getIndexes()) {
if (index.getFields().get(0).equals(field.getInternalName())) {
useIndex = index;
break;
}
}
SqlIndex useSqlIndex = SqlIndex.Static.getByIndex(useIndex);
SqlIndex.Table indexTable = useSqlIndex.getReadTable(this, useIndex);
String sourceTableName = fieldData.getIndexTable();
int symbolId = getSymbolId(key.getIndexKey(useIndex));
StringBuilder sql = new StringBuilder();
int fieldIndex = 0;
sql.append("SELECT ");
for (String indexFieldName : useIndex.getFields()) {
String indexColumnName = indexTable.getValueField(this, useIndex, fieldIndex);
++ fieldIndex;
vendor.appendIdentifier(sql, indexColumnName);
sql.append(" AS ");
vendor.appendIdentifier(sql, indexFieldName);
sql.append(", ");
}
sql.setLength(sql.length() - 2);
sql.append(" FROM ");
vendor.appendIdentifier(sql, sourceTableName);
sql.append(" WHERE ");
vendor.appendIdentifier(sql, "id");
sql.append(" = ");
vendor.appendValue(sql, id);
sql.append(" AND ");
vendor.appendIdentifier(sql, "symbolId");
sql.append(" = ");
sql.append(symbolId);
return sql.toString();
}
/**
* Executes the given read {@code statement} (created from the given
* {@code sqlQuery}) within the given {@code timeout} (in seconds).
*/
private ResultSet executeQueryBeforeTimeout(
Statement statement,
String sqlQuery,
int timeout)
throws SQLException {
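// Skip setting the timeout on PostgreSQL; older versions of its JDBC
// driver don't implement Statement#setQueryTimeout and throw instead.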
if (timeout > 0 && !(vendor instanceof SqlVendor.PostgreSQL)) {
statement.setQueryTimeout(timeout);
}
Stats.Timer timer = STATS.startTimer();
Profiler.Static.startThreadEvent(QUERY_PROFILER_EVENT);
try {
return statement.executeQuery(sqlQuery);
} finally {
double duration = timer.stop(QUERY_STATS_OPERATION);
Profiler.Static.stopThreadEvent(sqlQuery);
LOGGER.debug(
"Read from the SQL database using [{}] in [{}]ms",
sqlQuery, duration);
}
}
/**
* Selects the first object that matches the given {@code sqlQuery}
* with options from the given {@code query}.
*/
public <T> T selectFirstWithOptions(String sqlQuery, Query<T> query) {
sqlQuery = vendor.rewriteQueryWithLimitClause(sqlQuery, 1, 0);
Connection connection = null;
Statement statement = null;
ResultSet result = null;
try {
connection = openQueryConnection(query);
statement = connection.createStatement();
result = executeQueryBeforeTimeout(statement, sqlQuery, getQueryReadTimeout(query));
return result.next() ? createSavedObjectWithResultSet(result, query) : null;
} catch (SQLException ex) {
throw createQueryException(ex, sqlQuery, query);
} finally {
closeResources(query, connection, statement, result);
}
}
/**
* Selects the first object that matches the given {@code sqlQuery}
* without a timeout.
*/
public Object selectFirst(String sqlQuery) {
return selectFirstWithOptions(sqlQuery, null);
}
/**
* Selects a list of objects that match the given {@code sqlQuery}
* with options from the given {@code query}.
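*
* <p>Illustrative usage that forces the query to run against the
* writable data source ({@code Article} is a hypothetical record type):
* <pre>{@code
* Query<Article> query = Query.from(Article.class);
* query.getOptions().put(USE_READ_DATA_SOURCE_QUERY_OPTION, Boolean.FALSE);
* List<Article> articles = database.selectListWithOptions(
*         database.buildSelectStatement(query), query);
* }</pre>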
*/
public <T> List<T> selectListWithOptions(String sqlQuery, Query<T> query) {
Connection connection = null;
Statement statement = null;
ResultSet result = null;
List<T> objects = new ArrayList<T>();
int timeout = getQueryReadTimeout(query);
try {
connection = openQueryConnection(query);
statement = connection.createStatement();
result = executeQueryBeforeTimeout(statement, sqlQuery, timeout);
while (result.next()) {
objects.add(createSavedObjectWithResultSet(result, query));
}
return objects;
} catch (SQLException ex) {
throw createQueryException(ex, sqlQuery, query);
} finally {
closeResources(query, connection, statement, result);
}
}
/**
* Selects a list of objects that match the given {@code sqlQuery}
* without a timeout.
*/
public List<Object> selectList(String sqlQuery) {
return selectListWithOptions(sqlQuery, null);
}
/**
* Returns an iterable that selects all objects matching the given
* {@code sqlQuery} with options from the given {@code query}.
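*
* <p>Useful for scanning result sets that are too large to hold in
* memory. Illustrative usage (the fetch size of {@code 200} is
* arbitrary and {@code Article} is a hypothetical record type):
* <pre>{@code
* Query<Article> query = Query.from(Article.class);
* String sql = database.buildSelectStatement(query);
* for (Article article : database.selectIterableWithOptions(sql, 200, query)) {
*     // Process one object at a time.
* }
* }</pre>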
*/
public <T> Iterable<T> selectIterableWithOptions(
final String sqlQuery,
final int fetchSize,
final Query<T> query) {
return new Iterable<T>() {
@Override
public Iterator<T> iterator() {
return new SqlIterator<T>(sqlQuery, fetchSize, query);
}
};
}
private class SqlIterator<T> implements Iterator<T> {
private final String sqlQuery;
private final Query<T> query;
private final Connection connection;
private final Statement statement;
private final ResultSet result;
private boolean hasNext = true;
public SqlIterator(String initialSqlQuery, int fetchSize, Query<T> initialQuery) {
sqlQuery = initialSqlQuery;
query = initialQuery;
try {
connection = openReadConnection();
statement = connection.createStatement();
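// MySQL streams rows only when the fetch size is Integer.MIN_VALUE;
// other vendors use a regular fetch size.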
statement.setFetchSize(
getVendor() instanceof SqlVendor.MySQL ? Integer.MIN_VALUE :
fetchSize <= 0 ? 200 :
fetchSize);
result = statement.executeQuery(sqlQuery);
moveToNext();
} catch (SQLException ex) {
close();
throw createQueryException(ex, sqlQuery, query);
}
}
private void moveToNext() throws SQLException {
if (hasNext) {
hasNext = result.next();
if (!hasNext) {
close();
}
}
}
public void close() {
hasNext = false;
closeResources(query, connection, statement, result);
}
@Override
public boolean hasNext() {
return hasNext;
}
@Override
public T next() {
if (!hasNext) {
throw new NoSuchElementException();
}
try {
T object = createSavedObjectWithResultSet(result, query);
moveToNext();
return object;
} catch (SQLException ex) {
close();
throw createQueryException(ex, sqlQuery, query);
}
}
@Override
public void remove() {
throw new UnsupportedOperationException();
}
@Override
protected void finalize() {
close();
}
}
/**
* Fills the placeholders in the given {@code sqlQuery} with the given
* {@code parameters}.
*/
private static String fillPlaceholders(String sqlQuery, Object... parameters) {
StringBuilder filled = new StringBuilder();
int prevPh = 0;
for (int ph, index = 0; (ph = sqlQuery.indexOf('?', prevPh)) > -1; ++ index) {
filled.append(sqlQuery.substring(prevPh, ph));
prevPh = ph + 1;
filled.append(quoteValue(parameters[index]));
}
filled.append(sqlQuery.substring(prevPh));
return filled.toString();
}
/**
* Executes the given write {@code sqlQuery} with the given
* {@code parameters}.
*
* @deprecated Use {@link Static#executeUpdate} instead.
*/
@Deprecated
public int executeUpdate(String sqlQuery, Object... parameters) {
Connection connection = openConnection();
try {
return Static.executeUpdateWithArray(connection, sqlQuery, parameters);
} catch (SQLException ex) {
throw createQueryException(ex, fillPlaceholders(sqlQuery, parameters), null);
} finally {
closeConnection(connection);
}
}
/**
* Reads the given {@code resultSet} into a list of maps
* and closes it.
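*
* <p>Illustrative usage (the SQL and the connection handling are
* hypothetical; only the result set is closed by this method):
* <pre>{@code
* Statement statement = connection.createStatement();
* try {
*     List<Map<String, Object>> rows = database.readResultSet(
*             statement.executeQuery("SELECT id, typeId FROM Record"));
* } finally {
*     statement.close();
* }
* }</pre>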
*/
public List<Map<String, Object>> readResultSet(ResultSet resultSet) throws SQLException {
try {
ResultSetMetaData meta = resultSet.getMetaData();
List<String> columnNames = new ArrayList<String>();
for (int i = 1, count = meta.getColumnCount(); i <= count; ++ i) {
columnNames.add(meta.getColumnName(i));
}
List<Map<String, Object>> maps = new ArrayList<Map<String, Object>>();
while (resultSet.next()) {
Map<String, Object> map = new LinkedHashMap<String, Object>();
maps.add(map);
for (int i = 0, size = columnNames.size(); i < size; ++ i) {
map.put(columnNames.get(i), resultSet.getObject(i + 1));
}
}
return maps;
} finally {
resultSet.close();
}
}
// --- AbstractDatabase support ---
@Override
public Connection openConnection() {
DataSource dataSource = getDataSource();
if (dataSource == null) {
throw new SqlDatabaseException(this, "No SQL data source!");
}
try {
return dataSource.getConnection();
} catch (SQLException ex) {
throw new SqlDatabaseException(this, "Can't connect to the SQL engine!", ex);
}
}
@Override
protected Connection doOpenReadConnection() {
DataSource readDataSource = getReadDataSource();
if (readDataSource == null) {
readDataSource = getDataSource();
}
if (readDataSource == null) {
throw new SqlDatabaseException(this, "No SQL data source!");
}
try {
return readDataSource.getConnection();
} catch (SQLException ex) {
throw new SqlDatabaseException(this, "Can't connect to the SQL engine!", ex);
}
}
// Opens a connection that should be used to execute the given query.
private Connection openQueryConnection(Query<?> query) {
if (query != null) {
Connection connection = (Connection) query.getOptions().get(CONNECTION_QUERY_OPTION);
if (connection != null) {
return connection;
}
Boolean useRead = ObjectUtils.to(Boolean.class, query.getOptions().get(USE_READ_DATA_SOURCE_QUERY_OPTION));
if (useRead == null) {
useRead = Boolean.TRUE;
}
if (!useRead) {
return openConnection();
}
}
return openReadConnection();
}
@Override
public void closeConnection(Connection connection) {
if (connection != null) {
try {
connection.close();
} catch (SQLException ex) {
}
}
}
@Override
protected boolean isRecoverableError(Exception error) {
if (error instanceof SQLException) {
SQLException sqlError = (SQLException) error;
return "40001".equals(sqlError.getSQLState());
}
return false;
}
@Override
protected void doInitialize(String settingsKey, Map<String, Object> settings) {
close();
setReadDataSource(createDataSource(
settings,
READ_DATA_SOURCE_SETTING,
READ_JDBC_DRIVER_CLASS_SETTING,
READ_JDBC_URL_SETTING,
READ_JDBC_USER_SETTING,
READ_JDBC_PASSWORD_SETTING,
READ_JDBC_POOL_SIZE_SETTING));
setDataSource(createDataSource(
settings,
DATA_SOURCE_SETTING,
JDBC_DRIVER_CLASS_SETTING,
JDBC_URL_SETTING,
JDBC_USER_SETTING,
JDBC_PASSWORD_SETTING,
JDBC_POOL_SIZE_SETTING));
String vendorClassName = ObjectUtils.to(String.class, settings.get(VENDOR_CLASS_SETTING));
Class<?> vendorClass = null;
if (vendorClassName != null) {
vendorClass = ObjectUtils.getClassByName(vendorClassName);
if (vendorClass == null) {
throw new SettingsException(
VENDOR_CLASS_SETTING,
String.format("Can't find [%s]!",
vendorClassName));
} else if (!SqlVendor.class.isAssignableFrom(vendorClass)) {
throw new SettingsException(
VENDOR_CLASS_SETTING,
String.format("[%s] doesn't implement [%s]!",
vendorClass, SqlVendor.class));
}
}
if (vendorClass != null) {
setVendor((SqlVendor) TypeDefinition.getInstance(vendorClass).newInstance());
}
Boolean compressData = ObjectUtils.coalesce(
ObjectUtils.to(Boolean.class, settings.get(COMPRESS_DATA_SUB_SETTING)),
Settings.get(Boolean.class, "dari/isCompressSqlData"));
if (compressData != null) {
setCompressData(compressData);
}
}
private static final Map<String, String> DRIVER_CLASS_NAMES; static {
Map<String, String> m = new HashMap<String, String>();
m.put("h2", "org.h2.Driver");
m.put("jtds", "net.sourceforge.jtds.jdbc.Driver");
m.put("mysql", "com.mysql.jdbc.Driver");
m.put("postgresql", "org.postgresql.Driver");
DRIVER_CLASS_NAMES = m;
}
private static final Set<WeakReference<Driver>> REGISTERED_DRIVERS = new HashSet<WeakReference<Driver>>();
private DataSource createDataSource(
Map<String, Object> settings,
String dataSourceSetting,
String jdbcDriverClassSetting,
String jdbcUrlSetting,
String jdbcUserSetting,
String jdbcPasswordSetting,
String jdbcPoolSizeSetting) {
Object dataSourceObject = settings.get(dataSourceSetting);
if (dataSourceObject instanceof DataSource) {
return (DataSource) dataSourceObject;
} else {
String url = ObjectUtils.to(String.class, settings.get(jdbcUrlSetting));
if (ObjectUtils.isBlank(url)) {
return null;
} else {
String driverClassName = ObjectUtils.to(String.class, settings.get(jdbcDriverClassSetting));
Class<?> driverClass = null;
if (driverClassName != null) {
driverClass = ObjectUtils.getClassByName(driverClassName);
if (driverClass == null) {
throw new SettingsException(
jdbcDriverClassSetting,
String.format("Can't find [%s]!",
driverClassName));
} else if (!Driver.class.isAssignableFrom(driverClass)) {
throw new SettingsException(
jdbcDriverClassSetting,
String.format("[%s] doesn't implement [%s]!",
driverClass, Driver.class));
}
} else {
int firstColonAt = url.indexOf(':');
if (firstColonAt > -1) {
++ firstColonAt;
int secondColonAt = url.indexOf(':', firstColonAt);
if (secondColonAt > -1) {
driverClass = ObjectUtils.getClassByName(DRIVER_CLASS_NAMES.get(url.substring(firstColonAt, secondColonAt)));
}
}
}
if (driverClass != null) {
Driver driver = null;
for (Enumeration<Driver> e = DriverManager.getDrivers(); e.hasMoreElements(); ) {
Driver d = e.nextElement();
if (driverClass.isInstance(d)) {
driver = d;
break;
}
}
if (driver == null) {
driver = (Driver) TypeDefinition.getInstance(driverClass).newInstance();
try {
LOGGER.info("Registering [{}]", driver);
DriverManager.registerDriver(driver);
} catch (SQLException ex) {
LOGGER.warn("Can't register [{}]!", driver);
}
}
if (driver != null) {
REGISTERED_DRIVERS.add(new WeakReference<Driver>(driver));
}
}
String user = ObjectUtils.to(String.class, settings.get(jdbcUserSetting));
String password = ObjectUtils.to(String.class, settings.get(jdbcPasswordSetting));
Integer poolSize = ObjectUtils.to(Integer.class, settings.get(jdbcPoolSizeSetting));
if (poolSize == null || poolSize <= 0) {
poolSize = 24;
}
int partitionCount = 3;
int connectionsPerPartition = poolSize / partitionCount;
LOGGER.info("Automatically creating BoneCP data source:" +
"\n\turl={}" +
"\n\tusername={}" +
"\n\tpoolSize={}" +
"\n\tconnectionsPerPartition={}" +
"\n\tpartitionCount={}", new Object[] {
url,
user,
poolSize,
connectionsPerPartition,
partitionCount
});
BoneCPDataSource bone = new BoneCPDataSource();
bone.setJdbcUrl(url);
bone.setUsername(user);
bone.setPassword(password);
bone.setMinConnectionsPerPartition(connectionsPerPartition);
bone.setMaxConnectionsPerPartition(connectionsPerPartition);
bone.setPartitionCount(partitionCount);
bone.setConnectionTimeoutInMs(5000L);
return bone;
}
}
}
/** Returns the read timeout associated with the given {@code query}. */
private int getQueryReadTimeout(Query<?> query) {
if (query != null) {
Double timeout = query.getTimeout();
if (timeout == null) {
timeout = getReadTimeout();
}
if (timeout > 0.0) {
return (int) Math.round(timeout);
}
}
return 0;
}
@Override
public <T> List<T> readAll(Query<T> query) {
return selectListWithOptions(buildSelectStatement(query), query);
}
@Override
public long readCount(Query<?> query) {
String sqlQuery = buildCountStatement(query);
Connection connection = null;
Statement statement = null;
ResultSet result = null;
try {
connection = openQueryConnection(query);
statement = connection.createStatement();
result = executeQueryBeforeTimeout(statement, sqlQuery, getQueryReadTimeout(query));
if (result.next()) {
Object countObj = result.getObject(1);
if (countObj instanceof Number) {
return ((Number) countObj).longValue();
}
}
return 0;
} catch (SQLException ex) {
throw createQueryException(ex, sqlQuery, query);
} finally {
closeResources(query, connection, statement, result);
}
}
@Override
public <T> T readFirst(Query<T> query) {
if (query.getSorters().isEmpty()) {
Predicate predicate = query.getPredicate();
if (predicate instanceof CompoundPredicate) {
CompoundPredicate compoundPredicate = (CompoundPredicate) predicate;
if (PredicateParser.OR_OPERATOR.equals(compoundPredicate.getOperator())) {
for (Predicate child : compoundPredicate.getChildren()) {
Query<T> childQuery = query.clone();
childQuery.setPredicate(child);
T first = readFirst(childQuery);
if (first != null) {
return first;
}
}
return null;
}
}
}
return selectFirstWithOptions(buildSelectStatement(query), query);
}
@Override
public <T> Iterable<T> readIterable(Query<T> query, int fetchSize) {
Boolean useJdbc = ObjectUtils.to(Boolean.class, query.getOptions().get(USE_JDBC_FETCH_SIZE_QUERY_OPTION));
if (useJdbc == null) {
useJdbc = Boolean.TRUE;
}
if (useJdbc) {
return selectIterableWithOptions(buildSelectStatement(query), fetchSize, query);
} else {
return new ByIdIterable<T>(query, fetchSize);
}
}
private static class ByIdIterable<T> implements Iterable<T> {
private final Query<T> query;
private final int fetchSize;
public ByIdIterable(Query<T> query, int fetchSize) {
this.query = query;
this.fetchSize = fetchSize;
}
@Override
public Iterator<T> iterator() {
return new ByIdIterator<T>(query, fetchSize);
}
}
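// Iterates over all matching objects in stable (_type, _id) order,
// fetching at most fetchSize items per query so that very large result
// sets never need a long-lived server-side cursor.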
private static class ByIdIterator<T> implements Iterator<T> {
private final Query<T> query;
private final int fetchSize;
private UUID lastTypeId;
private UUID lastId;
private List<T> items;
private int index;
public ByIdIterator(Query<T> query, int fetchSize) {
if (!query.getSorters().isEmpty()) {
throw new IllegalArgumentException("Can't iterate over a query that has sorters!");
}
this.query = query.clone().timeout(0.0).sortAscending("_type").sortAscending("_id");
this.fetchSize = fetchSize > 0 ? fetchSize : 200;
}
@Override
public boolean hasNext() {
if (items != null && items.isEmpty()) {
return false;
}
if (items == null || index >= items.size()) {
Query<T> nextQuery = query.clone();
if (lastTypeId != null) {
nextQuery.and("_type = ? and _id > ?", lastTypeId, lastId);
}
items = nextQuery.select(0, fetchSize).getItems();
int size = items.size();
if (size < 1) {
if (lastTypeId == null) {
return false;
} else {
nextQuery = query.clone().and("_type > ?", lastTypeId);
items = nextQuery.select(0, fetchSize).getItems();
size = items.size();
if (size < 1) {
return false;
}
}
}
State lastState = State.getInstance(items.get(size - 1));
lastTypeId = lastState.getTypeId();
lastId = lastState.getId();
index = 0;
}
return true;
}
@Override
public T next() {
if (hasNext()) {
T object = items.get(index);
++ index;
return object;
} else {
throw new NoSuchElementException();
}
}
@Override
public void remove() {
throw new UnsupportedOperationException();
}
}
@Override
public Date readLastUpdate(Query<?> query) {
String sqlQuery = buildLastUpdateStatement(query);
Connection connection = null;
Statement statement = null;
ResultSet result = null;
try {
connection = openQueryConnection(query);
statement = connection.createStatement();
result = executeQueryBeforeTimeout(statement, sqlQuery, getQueryReadTimeout(query));
if (result.next()) {
Double date = ObjectUtils.to(Double.class, result.getObject(1));
if (date != null) {
return new Date((long) (date * 1000L));
}
}
return null;
} catch (SQLException ex) {
throw createQueryException(ex, sqlQuery, query);
} finally {
closeResources(query, connection, statement, result);
}
}
@Override
public <T> PaginatedResult<T> readPartial(final Query<T> query, long offset, int limit) {
List<T> objects = selectListWithOptions(
vendor.rewriteQueryWithLimitClause(buildSelectStatement(query), limit + 1, offset),
query);
int size = objects.size();
if (size <= limit) {
return new PaginatedResult<T>(offset, limit, offset + size, objects);
} else {
objects.remove(size - 1);
return new PaginatedResult<T>(offset, limit, 0, objects) {
private Long count;
@Override
public long getCount() {
if (count == null) {
count = readCount(query);
}
return count;
}
@Override
public boolean hasNext() {
return true;
}
};
}
}
@Override
public <T> PaginatedResult<Grouping<T>> readPartialGrouped(Query<T> query, long offset, int limit, String... fields) {
List<Grouping<T>> groupings = new ArrayList<Grouping<T>>();
String sqlQuery = buildGroupStatement(query, fields);
Connection connection = null;
Statement statement = null;
ResultSet result = null;
try {
connection = openQueryConnection(query);
statement = connection.createStatement();
result = executeQueryBeforeTimeout(statement, sqlQuery, getQueryReadTimeout(query));
int fieldsLength = fields.length;
int groupingsCount = 0;
for (int i = 0, last = (int) offset + limit; result.next(); ++ i, ++ groupingsCount) {
if (i < offset || i >= last) {
continue;
}
long count = ObjectUtils.to(long.class, result.getObject(1));
List<Object> keys = new ArrayList<Object>();
for (int j = 0; j < fieldsLength; ++ j) {
keys.add(result.getObject(j + 2));
}
groupings.add(new SqlGrouping<T>(keys, query, fields, count));
}
int groupingsSize = groupings.size();
for (int i = 0; i < fieldsLength; ++ i) {
ObjectField field = query.mapEmbeddedKey(getEnvironment(), fields[i]).getField();
if (field != null) {
Map<String, Object> rawKeys = new HashMap<String, Object>();
for (int j = 0; j < groupingsSize; ++ j) {
rawKeys.put(String.valueOf(j), groupings.get(j).getKeys().get(i));
}
String itemType = field.getInternalItemType();
if (ObjectField.RECORD_TYPE.equals(itemType)) {
for (Map.Entry<String, Object> entry : rawKeys.entrySet()) {
Map<String, Object> ref = new HashMap<String, Object>();
ref.put(StateValueUtils.REFERENCE_KEY, entry.getValue());
entry.setValue(ref);
}
}
Map<?, ?> convertedKeys = (Map<?, ?>) StateValueUtils.toJavaValue(query.getDatabase(), null, field, "map/" + itemType, rawKeys);
for (int j = 0; j < groupingsSize; ++ j) {
groupings.get(j).getKeys().set(i, convertedKeys.get(String.valueOf(j)));
}
}
}
return new PaginatedResult<Grouping<T>>(offset, limit, groupingsCount, groupings);
} catch (SQLException ex) {
throw createQueryException(ex, sqlQuery, query);
} finally {
closeResources(query, connection, statement, result);
}
}
/** SQL-specific implementation of {@link Grouping}. */
private class SqlGrouping<T> extends AbstractGrouping<T> {
private long count;
public SqlGrouping(List<Object> keys, Query<T> query, String[] fields, long count) {
super(keys, query, fields);
this.count = count;
}
// --- AbstractGrouping support ---
@Override
protected Aggregate createAggregate(String field) {
throw new UnsupportedOperationException();
}
@Override
public long getCount() {
return count;
}
}
@Override
protected void beginTransaction(Connection connection, boolean isImmediate) throws SQLException {
connection.setAutoCommit(false);
}
@Override
protected void commitTransaction(Connection connection, boolean isImmediate) throws SQLException {
connection.commit();
}
@Override
protected void rollbackTransaction(Connection connection, boolean isImmediate) throws SQLException {
connection.rollback();
}
@Override
protected void endTransaction(Connection connection, boolean isImmediate) throws SQLException {
connection.setAutoCommit(true);
}
@Override
protected void doSaves(Connection connection, boolean isImmediate, List<State> states) throws SQLException {
List<State> indexStates = null;
for (State state1 : states) {
if (Boolean.TRUE.equals(state1.getExtra(SKIP_INDEX_STATE_EXTRA))) {
indexStates = new ArrayList<State>();
for (State state2 : states) {
if (!Boolean.TRUE.equals(state2.getExtra(SKIP_INDEX_STATE_EXTRA))) {
indexStates.add(state2);
}
}
break;
}
}
if (indexStates == null) {
indexStates = states;
}
SqlIndex.Static.deleteByStates(this, connection, indexStates);
Map<State, String> inRowIndexes = SqlIndex.Static.insertByStates(this, connection, indexStates);
boolean hasInRowIndex = hasInRowIndex();
SqlVendor vendor = getVendor();
double now = System.currentTimeMillis() / 1000.0;
for (State state : states) {
boolean isNew = state.isNew();
boolean saveInRowIndex = hasInRowIndex && !Boolean.TRUE.equals(state.getExtra(SKIP_INDEX_STATE_EXTRA));
UUID id = state.getId();
UUID typeId = state.getTypeId();
byte[] dataBytes = null;
String inRowIndex = inRowIndexes.get(state);
byte[] inRowIndexBytes = inRowIndex != null ? inRowIndex.getBytes(StringUtils.UTF_8) : new byte[0];
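// Optimistically INSERT new states and UPDATE existing ones: a
// duplicate-key violation turns an insert into an update, and an
// update that matches no rows turns back into an insert.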
while (true) {
if (isNew) {
try {
if (dataBytes == null) {
dataBytes = serializeState(state);
}
List<Object> parameters = new ArrayList<Object>();
StringBuilder insertBuilder = new StringBuilder();
insertBuilder.append("INSERT INTO ");
vendor.appendIdentifier(insertBuilder, RECORD_TABLE);
insertBuilder.append(" (");
vendor.appendIdentifier(insertBuilder, ID_COLUMN);
insertBuilder.append(",");
vendor.appendIdentifier(insertBuilder, TYPE_ID_COLUMN);
insertBuilder.append(",");
vendor.appendIdentifier(insertBuilder, DATA_COLUMN);
if (saveInRowIndex) {
insertBuilder.append(",");
vendor.appendIdentifier(insertBuilder, IN_ROW_INDEX_COLUMN);
}
insertBuilder.append(") VALUES (");
vendor.appendBindValue(insertBuilder, id, parameters);
insertBuilder.append(",");
vendor.appendBindValue(insertBuilder, typeId, parameters);
insertBuilder.append(",");
vendor.appendBindValue(insertBuilder, dataBytes, parameters);
if (saveInRowIndex) {
insertBuilder.append(",");
vendor.appendBindValue(insertBuilder, inRowIndexBytes, parameters);
}
insertBuilder.append(")");
Static.executeUpdateWithList(connection, insertBuilder.toString(), parameters);
} catch (SQLException ex) {
if (Static.isIntegrityConstraintViolation(ex)) {
isNew = false;
continue;
} else {
throw ex;
}
}
} else {
List<AtomicOperation> atomicOperations = state.getAtomicOperations();
if (atomicOperations.isEmpty()) {
if (dataBytes == null) {
dataBytes = serializeState(state);
}
List<Object> parameters = new ArrayList<Object>();
StringBuilder updateBuilder = new StringBuilder();
updateBuilder.append("UPDATE ");
vendor.appendIdentifier(updateBuilder, RECORD_TABLE);
updateBuilder.append(" SET ");
vendor.appendIdentifier(updateBuilder, TYPE_ID_COLUMN);
updateBuilder.append("=");
vendor.appendBindValue(updateBuilder, typeId, parameters);
updateBuilder.append(",");
if (saveInRowIndex) {
vendor.appendIdentifier(updateBuilder, IN_ROW_INDEX_COLUMN);
updateBuilder.append("=");
vendor.appendBindValue(updateBuilder, inRowIndexBytes, parameters);
updateBuilder.append(",");
}
vendor.appendIdentifier(updateBuilder, DATA_COLUMN);
updateBuilder.append("=");
vendor.appendBindValue(updateBuilder, dataBytes, parameters);
updateBuilder.append(" WHERE ");
vendor.appendIdentifier(updateBuilder, ID_COLUMN);
updateBuilder.append("=");
vendor.appendBindValue(updateBuilder, id, parameters);
if (Static.executeUpdateWithList(connection, updateBuilder.toString(), parameters) < 1) {
isNew = true;
continue;
}
} else {
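// Atomic operations need compare-and-swap semantics: re-read the
// current row, replay the operations on top of it, and update only
// if the type ID and data columns are still unchanged.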
Object oldObject = Query.
from(Object.class).
where("_id = ?", id).
using(this).
resolveToReferenceOnly().
option(CONNECTION_QUERY_OPTION, connection).
option(RETURN_ORIGINAL_DATA_QUERY_OPTION, Boolean.TRUE).
option(USE_READ_DATA_SOURCE_QUERY_OPTION, Boolean.FALSE).
first();
if (oldObject == null) {
retryWrites();
break;
}
State oldState = State.getInstance(oldObject);
UUID oldTypeId = oldState.getTypeId();
byte[] oldData = Static.getOriginalData(oldObject);
state.setValues(oldState.getValues());
for (AtomicOperation operation : atomicOperations) {
String field = operation.getField();
state.putValue(field, oldState.getValue(field));
}
for (AtomicOperation operation : atomicOperations) {
operation.execute(state);
}
dataBytes = serializeState(state);
List<Object> parameters = new ArrayList<Object>();
StringBuilder updateBuilder = new StringBuilder();
updateBuilder.append("UPDATE ");
vendor.appendIdentifier(updateBuilder, RECORD_TABLE);
updateBuilder.append(" SET ");
vendor.appendIdentifier(updateBuilder, TYPE_ID_COLUMN);
updateBuilder.append("=");
vendor.appendBindValue(updateBuilder, typeId, parameters);
if (saveInRowIndex) {
updateBuilder.append(",");
vendor.appendIdentifier(updateBuilder, IN_ROW_INDEX_COLUMN);
updateBuilder.append("=");
vendor.appendBindValue(updateBuilder, inRowIndexBytes, parameters);
}
updateBuilder.append(",");
vendor.appendIdentifier(updateBuilder, DATA_COLUMN);
updateBuilder.append("=");
vendor.appendBindValue(updateBuilder, dataBytes, parameters);
updateBuilder.append(" WHERE ");
vendor.appendIdentifier(updateBuilder, ID_COLUMN);
updateBuilder.append("=");
vendor.appendBindValue(updateBuilder, id, parameters);
updateBuilder.append(" AND ");
vendor.appendIdentifier(updateBuilder, TYPE_ID_COLUMN);
updateBuilder.append("=");
vendor.appendBindValue(updateBuilder, oldTypeId, parameters);
updateBuilder.append(" AND ");
vendor.appendIdentifier(updateBuilder, DATA_COLUMN);
updateBuilder.append("=");
vendor.appendBindValue(updateBuilder, oldData, parameters);
if (Static.executeUpdateWithList(connection, updateBuilder.toString(), parameters) < 1) {
retryWrites();
break;
}
}
}
break;
}
while (true) {
if (isNew) {
List<Object> parameters = new ArrayList<Object>();
StringBuilder insertBuilder = new StringBuilder();
insertBuilder.append("INSERT INTO ");
vendor.appendIdentifier(insertBuilder, RECORD_UPDATE_TABLE);
insertBuilder.append(" (");
vendor.appendIdentifier(insertBuilder, ID_COLUMN);
insertBuilder.append(",");
vendor.appendIdentifier(insertBuilder, TYPE_ID_COLUMN);
insertBuilder.append(",");
vendor.appendIdentifier(insertBuilder, UPDATE_DATE_COLUMN);
insertBuilder.append(") VALUES (");
vendor.appendBindValue(insertBuilder, id, parameters);
insertBuilder.append(",");
vendor.appendBindValue(insertBuilder, typeId, parameters);
insertBuilder.append(",");
vendor.appendBindValue(insertBuilder, now, parameters);
insertBuilder.append(")");
try {
Static.executeUpdateWithList(connection, insertBuilder.toString(), parameters);
} catch (SQLException ex) {
if (Static.isIntegrityConstraintViolation(ex)) {
isNew = false;
continue;
} else {
throw ex;
}
}
} else {
List<Object> parameters = new ArrayList<Object>();
StringBuilder updateBuilder = new StringBuilder();
updateBuilder.append("UPDATE ");
vendor.appendIdentifier(updateBuilder, RECORD_UPDATE_TABLE);
updateBuilder.append(" SET ");
vendor.appendIdentifier(updateBuilder, TYPE_ID_COLUMN);
updateBuilder.append("=");
vendor.appendBindValue(updateBuilder, typeId, parameters);
updateBuilder.append(",");
vendor.appendIdentifier(updateBuilder, UPDATE_DATE_COLUMN);
updateBuilder.append("=");
vendor.appendBindValue(updateBuilder, now, parameters);
updateBuilder.append(" WHERE ");
vendor.appendIdentifier(updateBuilder, ID_COLUMN);
updateBuilder.append("=");
vendor.appendBindValue(updateBuilder, id, parameters);
if (Static.executeUpdateWithList(connection, updateBuilder.toString(), parameters) < 1) {
isNew = true;
continue;
}
}
break;
}
}
}
@Override
protected void doIndexes(Connection connection, boolean isImmediate, List<State> states) throws SQLException {
SqlIndex.Static.deleteByStates(this, connection, states);
Map<State, String> inRowIndexes = SqlIndex.Static.insertByStates(this, connection, states);
if (!hasInRowIndex()) {
return;
}
SqlVendor vendor = getVendor();
for (Map.Entry<State, String> entry : inRowIndexes.entrySet()) {
StringBuilder updateBuilder = new StringBuilder();
updateBuilder.append("UPDATE ");
vendor.appendIdentifier(updateBuilder, RECORD_TABLE);
updateBuilder.append(" SET ");
vendor.appendIdentifier(updateBuilder, IN_ROW_INDEX_COLUMN);
updateBuilder.append("=");
vendor.appendValue(updateBuilder, entry.getValue());
updateBuilder.append(" WHERE ");
vendor.appendIdentifier(updateBuilder, ID_COLUMN);
updateBuilder.append("=");
vendor.appendValue(updateBuilder, entry.getKey().getId());
Static.executeUpdateWithArray(connection, updateBuilder.toString());
}
}
/** @deprecated Use {@link #index} instead. */
@Deprecated
public void fixIndexes(List<State> states) {
Connection connection = openConnection();
try {
doIndexes(connection, true, states);
} catch (SQLException ex) {
List<UUID> ids = new ArrayList<UUID>();
for (State state : states) {
ids.add(state.getId());
}
throw new SqlDatabaseException(this, String.format(
"Can't index states! (%s)", ids), ex);
} finally {
closeConnection(connection);
}
}
@Override
protected void doDeletes(Connection connection, boolean isImmediate, List<State> states) throws SQLException {
SqlVendor vendor = getVendor();
StringBuilder whereBuilder = new StringBuilder();
whereBuilder.append(" WHERE ");
vendor.appendIdentifier(whereBuilder, ID_COLUMN);
whereBuilder.append(" IN (");
for (State state : states) {
vendor.appendValue(whereBuilder, state.getId());
whereBuilder.append(",");
}
whereBuilder.setCharAt(whereBuilder.length() - 1, ')');
StringBuilder deleteBuilder = new StringBuilder();
deleteBuilder.append("DELETE FROM ");
vendor.appendIdentifier(deleteBuilder, RECORD_TABLE);
deleteBuilder.append(whereBuilder);
Static.executeUpdateWithArray(connection, deleteBuilder.toString());
SqlIndex.Static.deleteByStates(this, connection, states);
StringBuilder updateBuilder = new StringBuilder();
updateBuilder.append("UPDATE ");
vendor.appendIdentifier(updateBuilder, RECORD_UPDATE_TABLE);
updateBuilder.append(" SET ");
vendor.appendIdentifier(updateBuilder, UPDATE_DATE_COLUMN);
updateBuilder.append("=");
vendor.appendValue(updateBuilder, System.currentTimeMillis() / 1000.0);
updateBuilder.append(whereBuilder);
Static.executeUpdateWithArray(connection, updateBuilder.toString());
}
@FieldData.FieldInternalNamePrefix("sql.")
public static class FieldData extends Modification<ObjectField> {
private String indexTable;
private boolean indexTableReadOnly;
private boolean indexTableSameColumnNames;
private boolean indexTableSource;
public String getIndexTable() {
return indexTable;
}
public void setIndexTable(String indexTable) {
this.indexTable = indexTable;
}
public boolean isIndexTableReadOnly() {
return indexTableReadOnly;
}
public void setIndexTableReadOnly(boolean indexTableReadOnly) {
this.indexTableReadOnly = indexTableReadOnly;
}
public boolean isIndexTableSameColumnNames() {
return indexTableSameColumnNames;
}
public void setIndexTableSameColumnNames(boolean indexTableSameColumnNames) {
this.indexTableSameColumnNames = indexTableSameColumnNames;
}
public boolean isIndexTableSource() {
return indexTableSource;
}
public void setIndexTableSource(boolean indexTableSource) {
this.indexTableSource = indexTableSource;
}
public boolean isIndexTableSourceFromAnywhere() {
if (isIndexTableSource()) {
return true;
}
ObjectField field = getOriginalObject();
ObjectStruct parent = field.getParent();
String fieldName = field.getInternalName();
for (ObjectIndex index : parent.getIndexes()) {
List<String> indexFieldNames = index.getFields();
if (!indexFieldNames.isEmpty() &&
indexFieldNames.contains(fieldName)) {
String firstIndexFieldName = indexFieldNames.get(0);
if (!fieldName.equals(firstIndexFieldName)) {
ObjectField firstIndexField = parent.getField(firstIndexFieldName);
if (firstIndexField != null) {
return firstIndexField.as(FieldData.class).isIndexTableSource();
}
}
}
}
return false;
}
}
/**
* Specifies the name of the table for storing target field values.
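*
* <p>Illustrative usage (the table and field names are hypothetical):
* <pre>{@code
* @FieldIndexTable(value = "ExampleTag", sameColumnNames = true)
* private Set<String> tags;
* }</pre>
*/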
@Documented
@ObjectField.AnnotationProcessorClass(FieldIndexTableProcessor.class)
@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.FIELD)
public @interface FieldIndexTable {
String value();
boolean readOnly() default false;
boolean sameColumnNames() default false;
boolean source() default false;
}
private static class FieldIndexTableProcessor implements ObjectField.AnnotationProcessor<FieldIndexTable> {
@Override
public void process(ObjectType type, ObjectField field, FieldIndexTable annotation) {
FieldData data = field.as(FieldData.class);
data.setIndexTable(annotation.value());
data.setIndexTableSameColumnNames(annotation.sameColumnNames());
data.setIndexTableSource(annotation.source());
data.setIndexTableReadOnly(annotation.readOnly());
}
}
/** {@link SqlDatabase} utility methods. */
public static final class Static {
private Static() {
}
public static List<SqlDatabase> getAll() {
return INSTANCES;
}
public static void deregisterAllDrivers() {
for (WeakReference<Driver> driverRef : REGISTERED_DRIVERS) {
Driver driver = driverRef.get();
if (driver != null) {
LOGGER.info("Deregistering [{}]", driver);
try {
DriverManager.deregisterDriver(driver);
} catch (SQLException ex) {
LOGGER.warn("Can't deregister [{}]!", driver);
}
}
}
}
/**
* Log a batch update exception with values.
*/
static void logBatchUpdateException(BatchUpdateException bue, String sqlQuery, List<? extends List<?>> parameters) {
int i = 0;
int failureOffset = bue.getUpdateCounts().length;
List<?> rowData = parameters.get(failureOffset);
StringBuilder errorBuilder = new StringBuilder();
errorBuilder.append("Batch update failed with query '");
errorBuilder.append(sqlQuery);
errorBuilder.append("' with values (");
for (Object value : rowData) {
if (i++ != 0) {
errorBuilder.append(", ");
}
if (value instanceof byte[]) {
errorBuilder.append(StringUtils.hex((byte[]) value));
} else {
errorBuilder.append(value);
}
}
errorBuilder.append(")");
Exception ex = bue.getNextException() != null ? bue.getNextException() : bue;
LOGGER.error(errorBuilder.toString(), ex);
}
static void logUpdateException(String sqlQuery, List<?> parameters) {
int i = 0;
StringBuilder errorBuilder = new StringBuilder();
errorBuilder.append("Batch update failed with query '");
errorBuilder.append(sqlQuery);
errorBuilder.append("' with values (");
for (Object value : parameters) {
if (i++ != 0) {
errorBuilder.append(", ");
}
if (value instanceof byte[]) {
errorBuilder.append(StringUtils.hex((byte[]) value));
} else {
errorBuilder.append(value);
}
}
errorBuilder.append(")");
LOGGER.error(errorBuilder.toString());
}
// Safely binds the given parameter to the given statement at the
// given index.
private static void bindParameter(PreparedStatement statement, int index, Object parameter) throws SQLException {
if (parameter instanceof String) {
parameter = ((String) parameter).getBytes(StringUtils.UTF_8);
}
if (parameter instanceof byte[]) {
byte[] parameterBytes = (byte[]) parameter;
int parameterBytesLength = parameterBytes.length;
if (parameterBytesLength > 2000) {
statement.setBinaryStream(index, new ByteArrayInputStream(parameterBytes), parameterBytesLength);
return;
}
}
statement.setObject(index, parameter);
}
/**
* Executes the given batch update {@code sqlQuery} with the given
* list of {@code parameters} within the given {@code connection}.
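*
* <p>Illustrative usage (table and column names are hypothetical):
* <pre>{@code
* List<List<Object>> rows = new ArrayList<List<Object>>();
* rows.add(Arrays.asList(firstId, "first"));
* rows.add(Arrays.asList(secondId, "second"));
* int[] affected = SqlDatabase.Static.executeBatchUpdate(
*         connection, "INSERT INTO Example (id, value) VALUES (?, ?)", rows);
* }</pre>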
*
* @return Array of number of rows affected by the update query.
*/
public static int[] executeBatchUpdate(
Connection connection,
String sqlQuery,
List<? extends List<?>> parameters) throws SQLException {
PreparedStatement prepared = connection.prepareStatement(sqlQuery);
List<?> currentRow = null;
try {
for (List<?> row : parameters) {
currentRow = row;
int columnIndex = 1;
for (Object parameter : row) {
bindParameter(prepared, columnIndex, parameter);
columnIndex++;
}
prepared.addBatch();
}
int[] affected = null;
Stats.Timer timer = STATS.startTimer();
Profiler.Static.startThreadEvent(UPDATE_PROFILER_EVENT);
try {
return (affected = prepared.executeBatch());
} finally {
double time = timer.stop(UPDATE_STATS_OPERATION);
Profiler.Static.stopThreadEvent(sqlQuery);
if (LOGGER.isDebugEnabled()) {
LOGGER.debug(
"SQL batch update: [{}], Parameters: {}, Affected: {}, Time: [{}]ms",
new Object[] { sqlQuery, parameters, affected != null ? Arrays.toString(affected) : "[]", time });
}
}
} catch (SQLException error) {
logUpdateException(sqlQuery, currentRow);
throw error;
} finally {
try {
prepared.close();
} catch (SQLException error) {
}
}
}
/**
* Executes the given update {@code sqlQuery} with the given
* {@code parameters} within the given {@code connection}.
*
* @return Number of rows affected by the update query.
*/
public static int executeUpdateWithList(
Connection connection,
String sqlQuery,
List<?> parameters)
throws SQLException {
if (parameters == null) {
return executeUpdateWithArray(connection, sqlQuery);
} else {
Object[] array = parameters.toArray(new Object[parameters.size()]);
return executeUpdateWithArray(connection, sqlQuery, array);
}
}
/**
* Executes the given update {@code sqlQuery} with the given
* {@code parameters} within the given {@code connection}.
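*
* <p>Illustrative usage (table and column names are hypothetical):
* <pre>{@code
* int affected = SqlDatabase.Static.executeUpdateWithArray(
*         connection, "DELETE FROM Example WHERE id = ?", someId);
* }</pre>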
*
* @return Number of rows affected by the update query.
*/
public static int executeUpdateWithArray(
Connection connection,
String sqlQuery,
Object... parameters)
throws SQLException {
boolean hasParameters = parameters != null && parameters.length > 0;
PreparedStatement prepared;
Statement statement;
if (hasParameters) {
prepared = connection.prepareStatement(sqlQuery);
statement = prepared;
} else {
prepared = null;
statement = connection.createStatement();
}
try {
if (hasParameters) {
for (int i = 0; i < parameters.length; i++) {
bindParameter(prepared, i + 1, parameters[i]);
}
}
Integer affected = null;
Stats.Timer timer = STATS.startTimer();
Profiler.Static.startThreadEvent(UPDATE_PROFILER_EVENT);
try {
return (affected = hasParameters ?
prepared.executeUpdate() :
statement.executeUpdate(sqlQuery));
} finally {
double time = timer.stop(UPDATE_STATS_OPERATION);
Profiler.Static.stopThreadEvent(sqlQuery);
if (LOGGER.isDebugEnabled()) {
LOGGER.debug(
"SQL update: [{}], Affected: [{}], Time: [{}]ms",
new Object[] { fillPlaceholders(sqlQuery, parameters), affected, time });
}
}
} finally {
try {
statement.close();
} catch (SQLException ex) {
}
}
}
/**
* Returns {@code true} if the given {@code error} looks like a
* {@link SQLIntegrityConstraintViolationException}.
*/
public static boolean isIntegrityConstraintViolation(SQLException error) {
if (error instanceof SQLIntegrityConstraintViolationException) {
return true;
} else {
String state = error.getSQLState();
return state != null && state.startsWith("23");
}
}
/**
* Returns the name of the table for storing the values of the
* given {@code index}.
*/
public static String getIndexTable(ObjectIndex index) {
return ObjectUtils.to(String.class, index.getOptions().get(INDEX_TABLE_INDEX_OPTION));
}
/**
* Sets the name of the table for storing the values of the
* given {@code index}.
*/
public static void setIndexTable(ObjectIndex index, String table) {
index.getOptions().put(INDEX_TABLE_INDEX_OPTION, table);
}
public static Object getExtraColumn(Object object, String name) {
return State.getInstance(object).getExtra(EXTRA_COLUMN_EXTRA_PREFIX + name);
}
public static byte[] getOriginalData(Object object) {
return (byte[]) State.getInstance(object).getExtra(ORIGINAL_DATA_EXTRA);
}
// --- Deprecated ---
/** @deprecated Use {@link #executeUpdateWithArray} instead. */
@Deprecated
public static int executeUpdate(
Connection connection,
String sqlQuery,
Object... parameters)
throws SQLException {
return executeUpdateWithArray(connection, sqlQuery, parameters);
}
}
// --- Deprecated ---
/** @deprecated No replacement. */
@Deprecated
public void beginThreadLocalReadConnection() {
}
/** @deprecated No replacement. */
@Deprecated
public void endThreadLocalReadConnection() {
}
}
| protected void doSaves(Connection connection, boolean isImmediate, List<State> states) throws SQLException {
List<State> indexStates = null;
for (State state1 : states) {
if (Boolean.TRUE.equals(state1.getExtra(SKIP_INDEX_STATE_EXTRA))) {
indexStates = new ArrayList<State>();
for (State state2 : states) {
if (!Boolean.TRUE.equals(state2.getExtra(SKIP_INDEX_STATE_EXTRA))) {
indexStates.add(state2);
}
}
break;
}
}
if (indexStates == null) {
indexStates = states;
}
SqlIndex.Static.deleteByStates(this, connection, indexStates);
Map<State, String> inRowIndexes = SqlIndex.Static.insertByStates(this, connection, indexStates);
boolean hasInRowIndex = hasInRowIndex();
SqlVendor vendor = getVendor();
double now = System.currentTimeMillis() / 1000.0;
for (State state : states) {
boolean isNew = state.isNew();
boolean saveInRowIndex = hasInRowIndex && !Boolean.TRUE.equals(state.getExtra(SKIP_INDEX_STATE_EXTRA));
UUID id = state.getId();
UUID typeId = state.getTypeId();
byte[] dataBytes = null;
String inRowIndex = inRowIndexes.get(state);
byte[] inRowIndexBytes = inRowIndex != null ? inRowIndex.getBytes(StringUtils.UTF_8) : new byte[0];
while (true) {
if (isNew) {
try {
if (dataBytes == null) {
dataBytes = serializeState(state);
}
List<Object> parameters = new ArrayList<Object>();
StringBuilder insertBuilder = new StringBuilder();
insertBuilder.append("INSERT INTO ");
vendor.appendIdentifier(insertBuilder, RECORD_TABLE);
insertBuilder.append(" (");
vendor.appendIdentifier(insertBuilder, ID_COLUMN);
insertBuilder.append(",");
vendor.appendIdentifier(insertBuilder, TYPE_ID_COLUMN);
insertBuilder.append(",");
vendor.appendIdentifier(insertBuilder, DATA_COLUMN);
if (saveInRowIndex) {
insertBuilder.append(",");
vendor.appendIdentifier(insertBuilder, IN_ROW_INDEX_COLUMN);
}
insertBuilder.append(") VALUES (");
vendor.appendBindValue(insertBuilder, id, parameters);
insertBuilder.append(",");
vendor.appendBindValue(insertBuilder, typeId, parameters);
insertBuilder.append(",");
vendor.appendBindValue(insertBuilder, dataBytes, parameters);
if (saveInRowIndex) {
insertBuilder.append(",");
vendor.appendBindValue(insertBuilder, inRowIndexBytes, parameters);
}
insertBuilder.append(")");
Static.executeUpdateWithList(connection, insertBuilder.toString(), parameters);
} catch (SQLException ex) {
if (Static.isIntegrityConstraintViolation(ex)) {
isNew = false;
continue;
} else {
throw ex;
}
}
} else {
List<AtomicOperation> atomicOperations = state.getAtomicOperations();
if (atomicOperations.isEmpty()) {
if (dataBytes == null) {
dataBytes = serializeState(state);
}
List<Object> parameters = new ArrayList<Object>();
StringBuilder updateBuilder = new StringBuilder();
updateBuilder.append("UPDATE ");
vendor.appendIdentifier(updateBuilder, RECORD_TABLE);
updateBuilder.append(" SET ");
vendor.appendIdentifier(updateBuilder, TYPE_ID_COLUMN);
updateBuilder.append("=");
vendor.appendBindValue(updateBuilder, typeId, parameters);
updateBuilder.append(",");
if (saveInRowIndex) {
vendor.appendIdentifier(updateBuilder, IN_ROW_INDEX_COLUMN);
updateBuilder.append("=");
vendor.appendBindValue(updateBuilder, inRowIndexBytes, parameters);
updateBuilder.append(",");
}
vendor.appendIdentifier(updateBuilder, DATA_COLUMN);
updateBuilder.append("=");
vendor.appendBindValue(updateBuilder, dataBytes, parameters);
updateBuilder.append(" WHERE ");
vendor.appendIdentifier(updateBuilder, ID_COLUMN);
updateBuilder.append("=");
vendor.appendBindValue(updateBuilder, id, parameters);
if (Static.executeUpdateWithList(connection, updateBuilder.toString(), parameters) < 1) {
isNew = true;
continue;
}
} else {
Object oldObject = Query.
from(Object.class).
where("_id = ?", id).
using(this).
option(CONNECTION_QUERY_OPTION, connection).
option(RETURN_ORIGINAL_DATA_QUERY_OPTION, Boolean.TRUE).
option(USE_READ_DATA_SOURCE_QUERY_OPTION, Boolean.FALSE).
first();
if (oldObject == null) {
retryWrites();
break;
}
State oldState = State.getInstance(oldObject);
UUID oldTypeId = oldState.getTypeId();
byte[] oldData = Static.getOriginalData(oldObject);
state.setValues(oldState.getValues());
for (AtomicOperation operation : atomicOperations) {
String field = operation.getField();
state.putValue(field, oldState.getValue(field));
}
for (AtomicOperation operation : atomicOperations) {
operation.execute(state);
}
dataBytes = serializeState(state);
List<Object> parameters = new ArrayList<Object>();
StringBuilder updateBuilder = new StringBuilder();
updateBuilder.append("UPDATE ");
vendor.appendIdentifier(updateBuilder, RECORD_TABLE);
updateBuilder.append(" SET ");
vendor.appendIdentifier(updateBuilder, TYPE_ID_COLUMN);
updateBuilder.append("=");
vendor.appendBindValue(updateBuilder, typeId, parameters);
if (saveInRowIndex) {
updateBuilder.append(",");
vendor.appendIdentifier(updateBuilder, IN_ROW_INDEX_COLUMN);
updateBuilder.append("=");
vendor.appendBindValue(updateBuilder, inRowIndexBytes, parameters);
}
updateBuilder.append(",");
vendor.appendIdentifier(updateBuilder, DATA_COLUMN);
updateBuilder.append("=");
vendor.appendBindValue(updateBuilder, dataBytes, parameters);
updateBuilder.append(" WHERE ");
vendor.appendIdentifier(updateBuilder, ID_COLUMN);
updateBuilder.append("=");
vendor.appendBindValue(updateBuilder, id, parameters);
updateBuilder.append(" AND ");
vendor.appendIdentifier(updateBuilder, TYPE_ID_COLUMN);
updateBuilder.append("=");
vendor.appendBindValue(updateBuilder, oldTypeId, parameters);
updateBuilder.append(" AND ");
vendor.appendIdentifier(updateBuilder, DATA_COLUMN);
updateBuilder.append("=");
vendor.appendBindValue(updateBuilder, oldData, parameters);
if (Static.executeUpdateWithList(connection, updateBuilder.toString(), parameters) < 1) {
retryWrites();
break;
}
}
}
break;
}
while (true) {
if (isNew) {
List<Object> parameters = new ArrayList<Object>();
StringBuilder insertBuilder = new StringBuilder();
insertBuilder.append("INSERT INTO ");
vendor.appendIdentifier(insertBuilder, RECORD_UPDATE_TABLE);
insertBuilder.append(" (");
vendor.appendIdentifier(insertBuilder, ID_COLUMN);
insertBuilder.append(",");
vendor.appendIdentifier(insertBuilder, TYPE_ID_COLUMN);
insertBuilder.append(",");
vendor.appendIdentifier(insertBuilder, UPDATE_DATE_COLUMN);
insertBuilder.append(") VALUES (");
vendor.appendBindValue(insertBuilder, id, parameters);
insertBuilder.append(",");
vendor.appendBindValue(insertBuilder, typeId, parameters);
insertBuilder.append(",");
vendor.appendBindValue(insertBuilder, now, parameters);
insertBuilder.append(")");
try {
Static.executeUpdateWithList(connection, insertBuilder.toString(), parameters);
} catch (SQLException ex) {
if (Static.isIntegrityConstraintViolation(ex)) {
isNew = false;
continue;
} else {
throw ex;
}
}
} else {
List<Object> parameters = new ArrayList<Object>();
StringBuilder updateBuilder = new StringBuilder();
updateBuilder.append("UPDATE ");
vendor.appendIdentifier(updateBuilder, RECORD_UPDATE_TABLE);
updateBuilder.append(" SET ");
vendor.appendIdentifier(updateBuilder, TYPE_ID_COLUMN);
updateBuilder.append("=");
vendor.appendBindValue(updateBuilder, typeId, parameters);
updateBuilder.append(",");
vendor.appendIdentifier(updateBuilder, UPDATE_DATE_COLUMN);
updateBuilder.append("=");
vendor.appendBindValue(updateBuilder, now, parameters);
updateBuilder.append(" WHERE ");
vendor.appendIdentifier(updateBuilder, ID_COLUMN);
updateBuilder.append("=");
vendor.appendBindValue(updateBuilder, id, parameters);
if (Static.executeUpdateWithList(connection, updateBuilder.toString(), parameters) < 1) {
isNew = true;
continue;
}
}
break;
}
}
}
|
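Note on the doSaves() pair above: both versions run an insert-then-update upsert loop. An INSERT is attempted first; an integrity-constraint violation flips the loop to an UPDATE, and an UPDATE that matches no row flips it back to INSERT. The atomic-operation branch instead re-reads the stored row and issues a compare-and-set UPDATE whose WHERE clause also matches the old type id and old data, calling retryWrites() when a concurrent writer won. The only difference between the buggy and fixed bodies is that the fixed one drops the resolveToReferenceOnly() call from the old-object query. A minimal sketch of the flip-flop loop, assuming a simplified Record(id, data) table and a hypothetical isDuplicateKey() helper in place of the project's Static utilities:

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.SQLException;

final class UpsertSketch {

    // Insert first; on a duplicate key fall back to UPDATE; if the UPDATE
    // finds no row (it was deleted meanwhile), flip back to INSERT.
    static void save(Connection con, String id, byte[] data) throws SQLException {
        boolean isNew = true;
        while (true) {
            if (isNew) {
                try (PreparedStatement ps = con.prepareStatement(
                        "INSERT INTO Record (id, data) VALUES (?, ?)")) {
                    ps.setString(1, id);
                    ps.setBytes(2, data);
                    ps.executeUpdate();
                } catch (SQLException ex) {
                    if (isDuplicateKey(ex)) {
                        isNew = false;      // row already exists: try UPDATE
                        continue;
                    }
                    throw ex;
                }
            } else {
                try (PreparedStatement ps = con.prepareStatement(
                        "UPDATE Record SET data = ? WHERE id = ?")) {
                    ps.setBytes(1, data);
                    ps.setString(2, id);
                    if (ps.executeUpdate() < 1) {
                        isNew = true;       // row vanished: retry as INSERT
                        continue;
                    }
                }
            }
            return;                         // one branch succeeded
        }
    }

    // SQLSTATE class 23 denotes integrity-constraint violations in most drivers.
    private static boolean isDuplicateKey(SQLException ex) {
        return ex.getSQLState() != null && ex.getSQLState().startsWith("23");
    }
}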
diff --git a/src/main/java/com/ethlo/web/webclient/plugins/FilterPluginCsrf.java b/src/main/java/com/ethlo/web/webclient/plugins/FilterPluginCsrf.java
index 490c7bc..f4dedb8 100644
--- a/src/main/java/com/ethlo/web/webclient/plugins/FilterPluginCsrf.java
+++ b/src/main/java/com/ethlo/web/webclient/plugins/FilterPluginCsrf.java
@@ -1,87 +1,87 @@
package com.ethlo.web.webclient.plugins;
import java.io.IOException;
import java.math.BigInteger;
import java.nio.file.AccessDeniedException;
import java.security.SecureRandom;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.servlet.http.HttpSession;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.ethlo.web.filtermapping.MultiMatcherFilter;
/**
*
* @author mha
*/
public class FilterPluginCsrf extends BeforeFilterPlugin
{
private final Logger logger = LoggerFactory.getLogger(MultiMatcherFilter.class);
public static final String CSRF_PROTECTION_ATTR_NAME = "_CSRF_TOKEN";
public static final String CSRF_HEADER_NAME = "X-CSRF";
public static final String CSRF_ACTIVE_ATTR_NAME = "_CSRF_PROTECTION_ACTIVE";
private SecureRandom random = new SecureRandom();
@Override
public boolean doFilterBefore(HttpServletRequest request, HttpServletResponse response) throws IOException
{
final HttpSession session = request.getSession(true);
final boolean hasActivatorReqParam = request.getParameter(CSRF_ACTIVE_ATTR_NAME) != null;
String sessionToken = (String) session.getAttribute(CSRF_PROTECTION_ATTR_NAME);
if (sessionToken == null && hasActivatorReqParam)
{
sessionToken = generateToken();
session.setAttribute(CSRF_PROTECTION_ATTR_NAME, sessionToken);
logger.info("Generated new session-wide CSRF protection token");
}
- else
+ else if (sessionToken != null)
{
final String headerToken = request.getHeader(CSRF_HEADER_NAME);
if (headerToken == null)
{
logger.info("CSRF header token not found");
- response.sendError(HttpServletResponse.SC_FORBIDDEN);
+ response.sendError(HttpServletResponse.SC_FORBIDDEN, "Possible cross site forgery attempt. Missing CSRF request token");
return false;
}
else if (! sessionToken.equals(headerToken))
{
logger.warn("CSRF header token {} did not match session token", headerToken);
- response.sendError(HttpServletResponse.SC_FORBIDDEN, "Possible cross site forgery attempt");
+ response.sendError(HttpServletResponse.SC_FORBIDDEN, "Possible cross site forgery attempt. Invalid CSRF request token");
return false;
}
}
request.setAttribute(CSRF_PROTECTION_ATTR_NAME, sessionToken);
return true;
}
public static String getToken(HttpServletRequest request)
{
final HttpSession session = request.getSession(false);
if (session != null)
{
return (String) session.getAttribute(FilterPluginCsrf.CSRF_PROTECTION_ATTR_NAME);
}
return null;
}
private String generateToken()
{
return new BigInteger(130, random).toString(36).toUpperCase();
}
public class CsrfAccessDeniedException extends AccessDeniedException
{
private static final long serialVersionUID = -3068335379678483344L;
public CsrfAccessDeniedException()
{
super("Invalid cross-site requst forgery token");
}
}
}
| false | true | public boolean doFilterBefore(HttpServletRequest request, HttpServletResponse response) throws IOException
{
final HttpSession session = request.getSession(true);
final boolean hasActivatorReqParam = request.getParameter(CSRF_ACTIVE_ATTR_NAME) != null;
String sessionToken = (String) session.getAttribute(CSRF_PROTECTION_ATTR_NAME);
if (sessionToken == null && hasActivatorReqParam)
{
sessionToken = generateToken();
session.setAttribute(CSRF_PROTECTION_ATTR_NAME, sessionToken);
logger.info("Generated new session-wide CSRF protection token");
}
else
{
final String headerToken = request.getHeader(CSRF_HEADER_NAME);
if (headerToken == null)
{
logger.info("CSRF header token not found");
response.sendError(HttpServletResponse.SC_FORBIDDEN);
return false;
}
else if (! sessionToken.equals(headerToken))
{
logger.warn("CSRF header token {} did not match session token", headerToken);
response.sendError(HttpServletResponse.SC_FORBIDDEN, "Possible cross site forgery attempt");
return false;
}
}
request.setAttribute(CSRF_PROTECTION_ATTR_NAME, sessionToken);
return true;
}
| public boolean doFilterBefore(HttpServletRequest request, HttpServletResponse response) throws IOException
{
final HttpSession session = request.getSession(true);
final boolean hasActivatorReqParam = request.getParameter(CSRF_ACTIVE_ATTR_NAME) != null;
String sessionToken = (String) session.getAttribute(CSRF_PROTECTION_ATTR_NAME);
if (sessionToken == null && hasActivatorReqParam)
{
sessionToken = generateToken();
session.setAttribute(CSRF_PROTECTION_ATTR_NAME, sessionToken);
logger.info("Generated new session-wide CSRF protection token");
}
else if (sessionToken != null)
{
final String headerToken = request.getHeader(CSRF_HEADER_NAME);
if (headerToken == null)
{
logger.info("CSRF header token not found");
response.sendError(HttpServletResponse.SC_FORBIDDEN, "Possible cross site forgery attempt. Missing CSRF request token");
return false;
}
else if (! sessionToken.equals(headerToken))
{
logger.warn("CSRF header token {} did not match session token", headerToken);
response.sendError(HttpServletResponse.SC_FORBIDDEN, "Possible cross site forgery attempt. Invalid CSRF request token");
return false;
}
}
request.setAttribute(CSRF_PROTECTION_ATTR_NAME, sessionToken);
return true;
}
|
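The decisive change above is the guard else if (sessionToken != null): in the buggy version, any session that had never activated CSRF protection still fell into the validation branch, where a missing X-CSRF header drew a spurious 403 and a present one crashed with a NullPointerException at sessionToken.equals(headerToken). The remaining hunks only make the 403 messages more specific. A null-safe restatement of the check, with illustrative names rather than the filter's actual API:

import java.util.Objects;

final class CsrfCheckSketch {

    // Returns true when the request may proceed.
    static boolean allow(String sessionToken, String headerToken) {
        if (sessionToken == null) {
            return true;    // protection never activated: nothing to verify
        }
        // Objects.equals is null-safe, so a missing header simply fails the check.
        return Objects.equals(sessionToken, headerToken);
    }

    public static void main(String[] args) {
        System.out.println(allow(null, null));   // true  (no token issued yet)
        System.out.println(allow("T1", null));   // false (missing CSRF header)
        System.out.println(allow("T1", "T1"));   // true  (token matches)
        System.out.println(allow("T1", "T2"));   // false (token mismatch)
    }
}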
diff --git a/nuget-server/src/jetbrains/buildServer/nuget/server/runner/pack/PackRunType.java b/nuget-server/src/jetbrains/buildServer/nuget/server/runner/pack/PackRunType.java
index 1c61c9e7..12bcffb9 100644
--- a/nuget-server/src/jetbrains/buildServer/nuget/server/runner/pack/PackRunType.java
+++ b/nuget-server/src/jetbrains/buildServer/nuget/server/runner/pack/PackRunType.java
@@ -1,109 +1,109 @@
/*
* Copyright 2000-2011 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package jetbrains.buildServer.nuget.server.runner.pack;
import jetbrains.buildServer.agent.ServerProvidedProperties;
import jetbrains.buildServer.nuget.common.PackagesConstants;
import jetbrains.buildServer.nuget.server.runner.NuGetRunType;
import jetbrains.buildServer.nuget.server.util.BasePropertiesProcessor;
import jetbrains.buildServer.parameters.ReferencesResolverUtil;
import jetbrains.buildServer.serverSide.InvalidProperty;
import jetbrains.buildServer.serverSide.PropertiesProcessor;
import jetbrains.buildServer.web.openapi.PluginDescriptor;
import org.jetbrains.annotations.NotNull;
import java.util.Collection;
import java.util.HashMap;
import java.util.Map;
import static jetbrains.buildServer.nuget.common.PackagesConstants.*;
/**
* @author Eugene Petrenko ([email protected])
* Date: 22.08.11 21:05
*/
public class PackRunType extends NuGetRunType {
public PackRunType(@NotNull final PluginDescriptor descriptor) {
super(descriptor);
}
@NotNull
@Override
public String getType() {
return PackagesConstants.PACK_RUN_TYPE;
}
@Override
public String getDisplayName() {
return "NuGet Packages Pack";
}
@Override
public String getDescription() {
return "Creates NuGet package from a given spec file";
}
@NotNull
@Override
public String describeParameters(@NotNull Map<String, String> parameters) {
StringBuilder sb = new StringBuilder();
sb.append("Pack: ").append(parameters.get(PackagesConstants.NUGET_PACK_SPEC_FILE)).append("\n");
sb.append("Version: ").append(parameters.get(PackagesConstants.NUGET_PACK_VERSION)).append("\n");
return sb.toString();
}
@NotNull
@Override
public PropertiesProcessor getRunnerPropertiesProcessor() {
return new BasePropertiesProcessor() {
@Override
protected void checkProperties(@NotNull Map<String, String> map, @NotNull Collection<InvalidProperty> result) {
notEmpty(NUGET_PATH, "Path to nuget.exe must be specified", map, result);
notEmpty(NUGET_PACK_SPEC_FILE, "Package definition files must be specified", map, result);
notEmpty(NUGET_PACK_OUTPUT_DIR, "Package creation output directory must be specified", map, result);
final String version = notEmpty(NUGET_PACK_VERSION, "Version must be specified", map, result);
- if (version != null && !ReferencesResolverUtil.containsReference(version)) {
+ if (version != null && !ReferencesResolverUtil.containsReference(version) && !version.matches("\\d+(\\.\\d+){1,3}")) {
result.add(new InvalidProperty(NUGET_PACK_VERSION, "Version must be NuGet version format, i.e. 1.2.3 or 5.4.3.2"));
}
//TODO: check properties are well-formed
}
};
}
@NotNull
@Override
protected String getEditJsp() {
return "pack/editPack.jsp";
}
@NotNull
@Override
protected String getViewJsp() {
return "pack/viewPack.jsp";
}
@Override
public Map<String, String> getDefaultRunnerProperties() {
return new HashMap<String, String>(){{
put(PackagesConstants.NUGET_PACK_VERSION, "0." + ReferencesResolverUtil.makeReference(ServerProvidedProperties.BUILD_NUMBER_PROP));
put(PackagesConstants.NUGET_PACK_OUTPUT_CLEAR, "checked");
put(PackagesConstants.NUGET_PACK_PROPERTIES, "Configuration=Release");
}};
}
}
| true | true | public PropertiesProcessor getRunnerPropertiesProcessor() {
return new BasePropertiesProcessor() {
@Override
protected void checkProperties(@NotNull Map<String, String> map, @NotNull Collection<InvalidProperty> result) {
notEmpty(NUGET_PATH, "Path to nuget.exe must be specified", map, result);
notEmpty(NUGET_PACK_SPEC_FILE, "Package definition files must be specified", map, result);
notEmpty(NUGET_PACK_OUTPUT_DIR, "Package creation output directory must be specified", map, result);
final String version = notEmpty(NUGET_PACK_VERSION, "Version must be specified", map, result);
if (version != null && !ReferencesResolverUtil.containsReference(version)) {
result.add(new InvalidProperty(NUGET_PACK_VERSION, "Version must be NuGet version format, i.e. 1.2.3 or 5.4.3.2"));
}
//TODO: check properties are well-formed
}
};
}
| public PropertiesProcessor getRunnerPropertiesProcessor() {
return new BasePropertiesProcessor() {
@Override
protected void checkProperties(@NotNull Map<String, String> map, @NotNull Collection<InvalidProperty> result) {
notEmpty(NUGET_PATH, "Path to nuget.exe must be specified", map, result);
notEmpty(NUGET_PACK_SPEC_FILE, "Package definition files must be specified", map, result);
notEmpty(NUGET_PACK_OUTPUT_DIR, "Package creation output directory must be specified", map, result);
final String version = notEmpty(NUGET_PACK_VERSION, "Version must be specified", map, result);
if (version != null && !ReferencesResolverUtil.containsReference(version) && !version.matches("\\d+(\\.\\d+){1,3}")) {
result.add(new InvalidProperty(NUGET_PACK_VERSION, "Version must be NuGet version format, i.e. 1.2.3 or 5.4.3.2"));
}
//TODO: check properties are well-formed
}
};
}
|
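The added condition makes the validator accept explicit versions of two to four dot-separated numeric groups (1.2, 1.2.3, 5.4.3.2) while still letting values containing parameter references pass, because containsReference() short-circuits the regex test. A quick, illustrative check of the pattern:

final class VersionPatternSketch {
    public static void main(String[] args) {
        String pattern = "\\d+(\\.\\d+){1,3}";   // the expression the fix adds
        String[] samples = {"1", "1.2", "1.2.3", "5.4.3.2", "1.2.3.4.5", "1.2-beta"};
        for (String v : samples) {
            System.out.println(v + " -> " + v.matches(pattern));
        }
        // 1 -> false, 1.2 -> true, 1.2.3 -> true, 5.4.3.2 -> true,
        // 1.2.3.4.5 -> false, 1.2-beta -> false
    }
}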
diff --git a/nuxeo-platform-webapp-core/src/main/java/org/nuxeo/ecm/webapp/helpers/EventManager.java b/nuxeo-platform-webapp-core/src/main/java/org/nuxeo/ecm/webapp/helpers/EventManager.java
index a01538058..30d2763b8 100644
--- a/nuxeo-platform-webapp-core/src/main/java/org/nuxeo/ecm/webapp/helpers/EventManager.java
+++ b/nuxeo-platform-webapp-core/src/main/java/org/nuxeo/ecm/webapp/helpers/EventManager.java
@@ -1,201 +1,203 @@
/*
* (C) Copyright 2006-2007 Nuxeo SAS (http://nuxeo.com/) and contributors.
*
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the GNU Lesser General Public License
* (LGPL) version 2.1 which accompanies this distribution, and is available at
* http://www.gnu.org/licenses/lgpl.html
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* Contributors:
* Nuxeo - initial API and implementation
*
* $Id$
*/
package org.nuxeo.ecm.webapp.helpers;
import static org.jboss.seam.ScopeType.APPLICATION;
import static org.jboss.seam.annotations.Install.FRAMEWORK;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.List;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.jboss.seam.annotations.Install;
import org.jboss.seam.annotations.Name;
import org.jboss.seam.annotations.Scope;
import org.jboss.seam.annotations.Startup;
import org.jboss.seam.core.Events;
import org.nuxeo.ecm.core.api.DocumentModel;
import org.nuxeo.ecm.platform.ui.web.shield.NuxeoJavaBeanErrorHandler;
/**
* Knows what events need to be raised based on the user selected document.
*
* @author <a href="mailto:[email protected]">Razvan Caraghin</a>
*
*/
@Name("eventManager")
@Scope(APPLICATION)
@Startup
@NuxeoJavaBeanErrorHandler
@Install(precedence=FRAMEWORK)
public class EventManager implements Serializable {
private static final long serialVersionUID = -7572053704069819975L;
private static final Log log = LogFactory.getLog(EventManager.class);
/**
* Raises events on going home, will be processed immediately.
*
* @return events fired
*/
public static List<String> raiseEventsOnGoingHome() {
List<String> eventsFired = new ArrayList<String>();
Events evtManager = Events.instance();
log.debug("Fire Event: " + EventNames.LOCATION_SELECTION_CHANGED);
evtManager.raiseEvent(EventNames.LOCATION_SELECTION_CHANGED);
eventsFired.add(EventNames.LOCATION_SELECTION_CHANGED);
log.debug("Fire Event: " + EventNames.GO_HOME);
evtManager.raiseEvent(EventNames.GO_HOME);
eventsFired.add(EventNames.GO_HOME);
return eventsFired;
}
/**
* Raises events on location selection change, will be processed immediately.
*
* @return events fired
*/
public static List<String> raiseEventsOnLocationSelectionChanged() {
List<String> eventsFired = new ArrayList<String>();
Events evtManager = Events.instance();
log.debug("Fire Event: " + EventNames.LOCATION_SELECTION_CHANGED);
evtManager.raiseEvent(EventNames.LOCATION_SELECTION_CHANGED);
eventsFired.add(EventNames.LOCATION_SELECTION_CHANGED);
log.debug("Fire Event: "
+ EventNames.USER_ALL_DOCUMENT_TYPES_SELECTION_CHANGED);
evtManager.raiseEvent(
EventNames.USER_ALL_DOCUMENT_TYPES_SELECTION_CHANGED);
eventsFired.add(EventNames.USER_ALL_DOCUMENT_TYPES_SELECTION_CHANGED);
return eventsFired;
}
/**
* Fires the necessary events so that the nuxeo infrastructure components get
* updated. The raised events will be processed immediately, before this
* call is ended. Intended to be used when a document gets selected. If the
* document is null, the location-selection events are fired; a "Root"
* document fires the GO_HOME event.
*
* @param document
* @return events fired
*/
public static List<String> raiseEventsOnDocumentSelected(DocumentModel document) {
List<String> eventsFired = new ArrayList<String>();
if (document == null) {
// XXX AT: kind of BBB, not sure why this was used like this
eventsFired = raiseEventsOnLocationSelectionChanged();
} else {
- Events evtManager = Events.instance();
+ Events evtManager = Events.instance();
String docType = document.getType();
String eventName;
if ("Domain".equals(docType)) {
eventName = EventNames.DOMAIN_SELECTION_CHANGED;
+ } else if ("Root".equals(docType)) {
+ eventName = EventNames.GO_HOME;
} else if ("WorkspaceRoot".equals(docType)
|| "SectionRoot".equals(docType)) {
eventName = EventNames.CONTENT_ROOT_SELECTION_CHANGED;
} else {
// regular document is selected
eventName = EventNames.DOCUMENT_SELECTION_CHANGED;
}
if (document.isFolder()) {
evtManager.raiseEvent(
EventNames.FOLDERISHDOCUMENT_SELECTION_CHANGED,
document);
}
log.debug("Fire Event: " + eventName);
evtManager.raiseEvent(eventName, document);
eventsFired.add(eventName);
log.debug("Fire Event: "
+ EventNames.USER_ALL_DOCUMENT_TYPES_SELECTION_CHANGED);
evtManager.raiseEvent(
EventNames.USER_ALL_DOCUMENT_TYPES_SELECTION_CHANGED);
eventsFired.add(EventNames.USER_ALL_DOCUMENT_TYPES_SELECTION_CHANGED);
}
return eventsFired;
}
/**
* Fires the necessary events so that the nuxeo infrastructure components get
* updated. The raised events will be processed immediately, before this
* call is ended. Intended to be used when a document gets edited/changed.
*
* @param document
* @return events fired
*/
public static List<String> raiseEventsOnDocumentChange(DocumentModel document) {
List<String> eventsFired = new ArrayList<String>();
// TODO: parameterize on document type
Events evtManager = Events.instance();
log.debug("Fire Event: " + EventNames.DOCUMENT_CHANGED);
evtManager.raiseEvent(EventNames.DOCUMENT_CHANGED, document);
eventsFired.add(EventNames.DOCUMENT_CHANGED);
log.debug("Fire Event: "
+ EventNames.USER_ALL_DOCUMENT_TYPES_SELECTION_CHANGED);
evtManager.raiseEvent(
EventNames.USER_ALL_DOCUMENT_TYPES_SELECTION_CHANGED);
eventsFired.add(EventNames.USER_ALL_DOCUMENT_TYPES_SELECTION_CHANGED);
return eventsFired;
}
/**
* Dispatch an event to get interested components informed when a changeable
* document was created (thus not saved) and before the form is displayed.
*
* @param changeableDocument
*/
public static void raiseEventsOnDocumentCreate(DocumentModel document) {
Events.instance().raiseEvent(EventNames.NEW_DOCUMENT_CREATED);
}
/**
* Fires the necessary events so that the nuxeo infrastructure components get
* updated. The raised events will be processed immediately, before this
* call is ended. Intended to be used when a the content of a folderish
* document gets changed.
*
* @param docType
* @return events fired
*/
public static List<String> raiseEventsOnDocumentChildrenChange(DocumentModel document) {
List<String> eventsFired = new ArrayList<String>();
Events.instance().raiseEvent(EventNames.DOCUMENT_CHILDREN_CHANGED, document);
eventsFired.add(EventNames.DOCUMENT_CHILDREN_CHANGED);
return eventsFired;
}
}
| false | true | public static List<String> raiseEventsOnDocumentSelected(DocumentModel document) {
List<String> eventsFired = new ArrayList<String>();
if (document == null) {
// XXX AT: kind of BBB, not sure why this was used like this
eventsFired = raiseEventsOnLocationSelectionChanged();
} else {
Events evtManager = Events.instance();
String docType = document.getType();
String eventName;
if ("Domain".equals(docType)) {
eventName = EventNames.DOMAIN_SELECTION_CHANGED;
} else if ("WorkspaceRoot".equals(docType)
|| "SectionRoot".equals(docType)) {
eventName = EventNames.CONTENT_ROOT_SELECTION_CHANGED;
} else {
// regular document is selected
eventName = EventNames.DOCUMENT_SELECTION_CHANGED;
}
if (document.isFolder()) {
evtManager.raiseEvent(
EventNames.FOLDERISHDOCUMENT_SELECTION_CHANGED,
document);
}
log.debug("Fire Event: " + eventName);
evtManager.raiseEvent(eventName, document);
eventsFired.add(eventName);
log.debug("Fire Event: "
+ EventNames.USER_ALL_DOCUMENT_TYPES_SELECTION_CHANGED);
evtManager.raiseEvent(
EventNames.USER_ALL_DOCUMENT_TYPES_SELECTION_CHANGED);
eventsFired.add(EventNames.USER_ALL_DOCUMENT_TYPES_SELECTION_CHANGED);
}
return eventsFired;
}
| public static List<String> raiseEventsOnDocumentSelected(DocumentModel document) {
List<String> eventsFired = new ArrayList<String>();
if (document == null) {
// XXX AT: kind of BBB, not sure why this was used like this
eventsFired = raiseEventsOnLocationSelectionChanged();
} else {
Events evtManager = Events.instance();
String docType = document.getType();
String eventName;
if ("Domain".equals(docType)) {
eventName = EventNames.DOMAIN_SELECTION_CHANGED;
} else if ("Root".equals(docType)) {
eventName = EventNames.GO_HOME;
} else if ("WorkspaceRoot".equals(docType)
|| "SectionRoot".equals(docType)) {
eventName = EventNames.CONTENT_ROOT_SELECTION_CHANGED;
} else {
// regular document is selected
eventName = EventNames.DOCUMENT_SELECTION_CHANGED;
}
if (document.isFolder()) {
evtManager.raiseEvent(
EventNames.FOLDERISHDOCUMENT_SELECTION_CHANGED,
document);
}
log.debug("Fire Event: " + eventName);
evtManager.raiseEvent(eventName, document);
eventsFired.add(eventName);
log.debug("Fire Event: "
+ EventNames.USER_ALL_DOCUMENT_TYPES_SELECTION_CHANGED);
evtManager.raiseEvent(
EventNames.USER_ALL_DOCUMENT_TYPES_SELECTION_CHANGED);
eventsFired.add(EventNames.USER_ALL_DOCUMENT_TYPES_SELECTION_CHANGED);
}
return eventsFired;
}
|
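The fix inserts a branch for the "Root" document type so that selecting the repository root raises GO_HOME instead of the generic DOCUMENT_SELECTION_CHANGED; the order of the checks matters, since only the final else treats the selection as a regular document. A map-based restatement of the dispatch, with plain strings standing in for the EventNames constants:

import java.util.Map;

final class SelectionEventSketch {

    // Special document types map to dedicated events; everything else is a
    // plain document selection.
    static String eventFor(String docType) {
        Map<String, String> special = Map.of(
                "Domain", "DOMAIN_SELECTION_CHANGED",
                "Root", "GO_HOME",                     // the branch the fix adds
                "WorkspaceRoot", "CONTENT_ROOT_SELECTION_CHANGED",
                "SectionRoot", "CONTENT_ROOT_SELECTION_CHANGED");
        return special.getOrDefault(docType, "DOCUMENT_SELECTION_CHANGED");
    }

    public static void main(String[] args) {
        System.out.println(eventFor("Root"));          // GO_HOME
        System.out.println(eventFor("Workspace"));     // DOCUMENT_SELECTION_CHANGED
    }
}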
diff --git a/de.hswt.hrm.common/src/de/hswt/hrm/common/database/DatabaseFactory.java b/de.hswt.hrm.common/src/de/hswt/hrm/common/database/DatabaseFactory.java
index 37622aab..34686e1f 100644
--- a/de.hswt.hrm.common/src/de/hswt/hrm/common/database/DatabaseFactory.java
+++ b/de.hswt.hrm.common/src/de/hswt/hrm/common/database/DatabaseFactory.java
@@ -1,58 +1,56 @@
package de.hswt.hrm.common.database;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.SQLException;
import static com.google.common.base.Strings.*;
import de.hswt.hrm.common.Config;
import de.hswt.hrm.common.Config.Keys;
import de.hswt.hrm.common.database.exception.DatabaseException;
/**
* Class that is used to get a database connection. This may be changed to injection later on.
*/
public final class DatabaseFactory {
private DatabaseFactory() { }
/**
* Returns a connection object for the database.
*
* @return Connection object for the database.
* @throws DatabaseException If connection could not be created.
*/
public static Connection getConnection() throws DatabaseException {
// load mariadb driver
try {
Class.forName("org.mariadb.jdbc.Driver");
}
catch (ClassNotFoundException e) {
throw new DatabaseException("Database driver not found.", e);
}
- // TODO Remove: Testserver jdbc:mysql://10.154.4.20
Config cfg = Config.getInstance();
final String host = cfg.getProperty(Keys.DB_HOST, "jdbc:mysql://localhost");
final String username = cfg.getProperty(Keys.DB_USER, "root");
final String password = cfg.getProperty(Keys.DB_PASSWORD, "70b145pl4ch7");
final String database = cfg.getProperty(Keys.DB_NAME, "hrm");
// Build connection String
String conStr = host;
if (!isNullOrEmpty(database)) {
conStr += conStr.endsWith("/") ? database : "/" + database;
}
try {
- Connection con = DriverManager.getConnection(conStr, username, password);
- return con;
+ return DriverManager.getConnection(conStr, username, password);
}
catch (SQLException e) {
// TODO maybe add specific information about the error
throw new DatabaseException(e);
}
}
}
| false | true | public static Connection getConnection() throws DatabaseException {
// load mariadb driver
try {
Class.forName("org.mariadb.jdbc.Driver");
}
catch (ClassNotFoundException e) {
throw new DatabaseException("Database driver not found.", e);
}
// TODO Remove: Testserver jdbc:mysql://10.154.4.20
Config cfg = Config.getInstance();
final String host = cfg.getProperty(Keys.DB_HOST, "jdbc:mysql://localhost");
final String username = cfg.getProperty(Keys.DB_USER, "root");
final String password = cfg.getProperty(Keys.DB_PASSWORD, "70b145pl4ch7");
final String database = cfg.getProperty(Keys.DB_NAME, "hrm");
// Build connection String
String conStr = host;
if (!isNullOrEmpty(database)) {
conStr += conStr.endsWith("/") ? database : "/" + database;
}
try {
Connection con = DriverManager.getConnection(conStr, username, password);
return con;
}
catch (SQLException e) {
// TODO maybe add specific information about the error
throw new DatabaseException(e);
}
}
| public static Connection getConnection() throws DatabaseException {
// load mariadb driver
try {
Class.forName("org.mariadb.jdbc.Driver");
}
catch (ClassNotFoundException e) {
throw new DatabaseException("Database driver not found.", e);
}
Config cfg = Config.getInstance();
final String host = cfg.getProperty(Keys.DB_HOST, "jdbc:mysql://localhost");
final String username = cfg.getProperty(Keys.DB_USER, "root");
final String password = cfg.getProperty(Keys.DB_PASSWORD, "70b145pl4ch7");
final String database = cfg.getProperty(Keys.DB_NAME, "hrm");
// Build connection String
String conStr = host;
if (!isNullOrEmpty(database)) {
conStr += conStr.endsWith("/") ? database : "/" + database;
}
try {
return DriverManager.getConnection(conStr, username, password);
}
catch (SQLException e) {
// TODO maybe add specific information about the error
throw new DatabaseException(e);
}
}
|
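Besides deleting the stale test-server TODO and returning the connection directly instead of through a temporary variable, the method's URL assembly is worth noting: the database name is appended with a separating "/" only when the host does not already end with one. A tiny sketch of that assembly, using example values rather than the class's defaults:

final class ConnStringSketch {

    // Append the database name, inserting "/" only when the host URL
    // does not already end with one.
    static String buildUrl(String host, String database) {
        String url = host;
        if (database != null && !database.isEmpty()) {
            url += url.endsWith("/") ? database : "/" + database;
        }
        return url;
    }

    public static void main(String[] args) {
        System.out.println(buildUrl("jdbc:mysql://localhost", "hrm"));   // jdbc:mysql://localhost/hrm
        System.out.println(buildUrl("jdbc:mysql://localhost/", "hrm"));  // jdbc:mysql://localhost/hrm
    }
}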
diff --git a/java/com/couchbase/lite/RevTreeTest.java b/java/com/couchbase/lite/RevTreeTest.java
index 4c9c564a..b58c998d 100644
--- a/java/com/couchbase/lite/RevTreeTest.java
+++ b/java/com/couchbase/lite/RevTreeTest.java
@@ -1,257 +1,257 @@
/**
* Original iOS version by Jens Alfke
* Ported to Android by Marty Schoch
*
* Copyright (c) 2012 Couchbase, Inc. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the
* License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.couchbase.lite;
import com.couchbase.lite.internal.RevisionInternal;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.EnumSet;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
public class RevTreeTest extends LiteTestCase {
public static final String TAG = "RevTree";
public void testForceInsertEmptyHistory() throws CouchbaseLiteException {
List<String> revHistory = null;
RevisionInternal rev = new RevisionInternal("FakeDocId", "1-tango", false, database);
Map<String, Object> revProperties = new HashMap<String, Object>();
revProperties.put("_id", rev.getDocId());
revProperties.put("_rev", rev.getRevId());
revProperties.put("message", "hi");
rev.setProperties(revProperties);
database.forceInsert(rev, revHistory, null);
}
public void testRevTree() throws CouchbaseLiteException {
RevisionInternal rev = new RevisionInternal("MyDocId", "4-foxy", false, database);
Map<String, Object> revProperties = new HashMap<String, Object>();
revProperties.put("_id", rev.getDocId());
revProperties.put("_rev", rev.getRevId());
revProperties.put("message", "hi");
rev.setProperties(revProperties);
List<String> revHistory = new ArrayList<String>();
revHistory.add(rev.getRevId());
revHistory.add("3-thrice");
revHistory.add("2-too");
revHistory.add("1-won");
database.forceInsert(rev, revHistory, null);
assertEquals(1, database.getDocumentCount());
verifyHistory(database, rev, revHistory);
RevisionInternal conflict = new RevisionInternal("MyDocId", "5-epsilon", false, database);
Map<String, Object> conflictProperties = new HashMap<String, Object>();
conflictProperties.put("_id", conflict.getDocId());
conflictProperties.put("_rev", conflict.getRevId());
conflictProperties.put("message", "yo");
conflict.setProperties(conflictProperties);
List<String> conflictHistory = new ArrayList<String>();
conflictHistory.add(conflict.getRevId());
conflictHistory.add("4-delta");
conflictHistory.add("3-gamma");
conflictHistory.add("2-too");
conflictHistory.add("1-won");
final List wasInConflict = new ArrayList();
final Database.ChangeListener listener = new Database.ChangeListener() {
@Override
public void changed(Database.ChangeEvent event) {
if (event.getChanges().get(0).isConflict()) {
wasInConflict.add(new Object());
}
}
};
database.addChangeListener(listener);
database.forceInsert(conflict, conflictHistory, null);
assertTrue(wasInConflict.size() > 0);
database.removeChangeListener(listener);
assertEquals(1, database.getDocumentCount());
verifyHistory(database, conflict, conflictHistory);
// Add an unrelated document:
RevisionInternal other = new RevisionInternal("AnotherDocID", "1-ichi", false, database);
Map<String,Object> otherProperties = new HashMap<String,Object>();
otherProperties.put("language", "jp");
other.setProperties(otherProperties);
List<String> otherHistory = new ArrayList<String>();
otherHistory.add(other.getRevId());
database.forceInsert(other, otherHistory, null);
// Fetch one of those phantom revisions with no body:
RevisionInternal rev2 = database.getDocumentWithIDAndRev(rev.getDocId(), "2-too", EnumSet.noneOf(Database.TDContentOptions.class));
assertEquals(rev.getDocId(), rev2.getDocId());
assertEquals("2-too", rev2.getRevId());
//Assert.assertNull(rev2.getContent());
// Make sure no duplicate rows were inserted for the common revisions:
assertEquals(8, database.getLastSequenceNumber());
// Make sure the revision with the higher revID wins the conflict:
RevisionInternal current = database.getDocumentWithIDAndRev(rev.getDocId(), null, EnumSet.noneOf(Database.TDContentOptions.class));
assertEquals(conflict, current);
// Get the _changes feed and verify only the winner is in it:
ChangesOptions options = new ChangesOptions();
RevisionList changes = database.changesSince(0, options, null);
RevisionList expectedChanges = new RevisionList();
expectedChanges.add(conflict);
expectedChanges.add(other);
assertEquals(changes, expectedChanges);
options.setIncludeConflicts(true);
changes = database.changesSince(0, options, null);
expectedChanges = new RevisionList();
expectedChanges.add(rev);
expectedChanges.add(conflict);
expectedChanges.add(other);
assertEquals(changes, expectedChanges);
}
/**
* Test that the public API works as expected in change notifications after a rev tree
* insertion. See https://github.com/couchbase/couchbase-lite-android-core/pull/27
*/
public void testRevTreeChangeNotifications() throws CouchbaseLiteException {
final String DOCUMENT_ID = "MyDocId";
// add a document with a single (first) revision
final RevisionInternal rev = new RevisionInternal(DOCUMENT_ID, "1-one", false, database);
Map<String, Object> revProperties = new HashMap<String, Object>();
revProperties.put("_id", rev.getDocId());
revProperties.put("_rev", rev.getRevId());
revProperties.put("message", "hi");
rev.setProperties(revProperties);
List<String> revHistory = Arrays.asList(rev.getRevId());
Database.ChangeListener listener = new Database.ChangeListener() {
@Override
public void changed(Database.ChangeEvent event) {
assertEquals(1, event.getChanges().size());
DocumentChange change = event.getChanges().get(0);
assertEquals(DOCUMENT_ID, change.getDocumentId());
assertEquals(rev.getRevId(), change.getRevisionId());
assertTrue(change.isCurrentRevision());
assertFalse(change.isConflict());
SavedRevision current = database.getDocument(change.getDocumentId()).getCurrentRevision();
assertEquals(rev.getRevId(), current.getId());
}
};
database.addChangeListener(listener);
database.forceInsert(rev, revHistory, null);
database.removeChangeListener(listener);
// add two more revisions to the document
final RevisionInternal rev3 = new RevisionInternal(DOCUMENT_ID, "3-three", false, database);
Map<String, Object> rev3Properties = new HashMap<String, Object>();
rev3Properties.put("_id", rev3.getDocId());
rev3Properties.put("_rev", rev3.getRevId());
rev3Properties.put("message", "hi again");
rev3.setProperties(rev3Properties);
List<String> rev3History = Arrays.asList(rev3.getRevId(), "2-two", rev.getRevId());
listener = new Database.ChangeListener() {
@Override
public void changed(Database.ChangeEvent event) {
assertEquals(1, event.getChanges().size());
DocumentChange change = event.getChanges().get(0);
assertEquals(DOCUMENT_ID, change.getDocumentId());
assertEquals(rev3.getRevId(), change.getRevisionId());
assertTrue(change.isCurrentRevision());
- // assertFalse(change.isConflict()); commented - currently failing. see https://github.com/couchbase/couchbase-lite-android-core/pull/27
+ assertFalse(change.isConflict());
Document doc = database.getDocument(change.getDocumentId());
assertEquals(rev3.getRevId(), doc.getCurrentRevisionId());
try {
assertEquals(3, doc.getRevisionHistory().size());
} catch (CouchbaseLiteException ex) {
fail("CouchbaseLiteException in change listener: " + ex.toString());
}
}
};
database.addChangeListener(listener);
database.forceInsert(rev3, rev3History, null);
database.removeChangeListener(listener);
// add a conflicting revision, with the same history length as the last revision we
// inserted. Since this new revision's revID has a higher ASCII sort, it should become the
// new winning revision.
final RevisionInternal conflictRev = new RevisionInternal(DOCUMENT_ID, "3-winner", false, database);
Map<String, Object> conflictProperties = new HashMap<String, Object>();
conflictProperties.put("_id", conflictRev.getDocId());
conflictProperties.put("_rev", conflictRev.getRevId());
conflictProperties.put("message", "winner");
conflictRev.setProperties(conflictProperties);
List<String> conflictRevHistory = Arrays.asList(conflictRev.getRevId(), "2-two", rev.getRevId());
listener = new Database.ChangeListener() {
@Override
public void changed(Database.ChangeEvent event) {
assertEquals(1, event.getChanges().size());
DocumentChange change = event.getChanges().get(0);
assertEquals(DOCUMENT_ID, change.getDocumentId());
assertEquals(conflictRev.getRevId(), change.getRevisionId());
assertTrue(change.isCurrentRevision());
assertTrue(change.isConflict());
Document doc = database.getDocument(change.getDocumentId());
assertEquals(conflictRev.getRevId(), doc.getCurrentRevisionId());
try {
assertEquals(2, doc.getConflictingRevisions().size());
assertEquals(3, doc.getRevisionHistory().size());
} catch (CouchbaseLiteException ex) {
fail("CouchbaseLiteException in change listener: " + ex.toString());
}
}
};
database.addChangeListener(listener);
database.forceInsert(conflictRev, conflictRevHistory, null);
database.removeChangeListener(listener);
}
private static void verifyHistory(Database db, RevisionInternal rev, List<String> history) {
RevisionInternal gotRev = db.getDocumentWithIDAndRev(rev.getDocId(), null, EnumSet.noneOf(Database.TDContentOptions.class));
assertEquals(rev, gotRev);
assertEquals(rev.getProperties(), gotRev.getProperties());
List<RevisionInternal> revHistory = db.getRevisionHistory(gotRev);
assertEquals(history.size(), revHistory.size());
for(int i=0; i<history.size(); i++) {
RevisionInternal hrev = revHistory.get(i);
assertEquals(rev.getDocId(), hrev.getDocId());
assertEquals(history.get(i), hrev.getRevId());
assertFalse(rev.isDeleted());
}
}
}
| true | true | public void testRevTreeChangeNotifications() throws CouchbaseLiteException {
final String DOCUMENT_ID = "MyDocId";
// add a document with a single (first) revision
final RevisionInternal rev = new RevisionInternal(DOCUMENT_ID, "1-one", false, database);
Map<String, Object> revProperties = new HashMap<String, Object>();
revProperties.put("_id", rev.getDocId());
revProperties.put("_rev", rev.getRevId());
revProperties.put("message", "hi");
rev.setProperties(revProperties);
List<String> revHistory = Arrays.asList(rev.getRevId());
Database.ChangeListener listener = new Database.ChangeListener() {
@Override
public void changed(Database.ChangeEvent event) {
assertEquals(1, event.getChanges().size());
DocumentChange change = event.getChanges().get(0);
assertEquals(DOCUMENT_ID, change.getDocumentId());
assertEquals(rev.getRevId(), change.getRevisionId());
assertTrue(change.isCurrentRevision());
assertFalse(change.isConflict());
SavedRevision current = database.getDocument(change.getDocumentId()).getCurrentRevision();
assertEquals(rev.getRevId(), current.getId());
}
};
database.addChangeListener(listener);
database.forceInsert(rev, revHistory, null);
database.removeChangeListener(listener);
// add two more revisions to the document
final RevisionInternal rev3 = new RevisionInternal(DOCUMENT_ID, "3-three", false, database);
Map<String, Object> rev3Properties = new HashMap<String, Object>();
rev3Properties.put("_id", rev3.getDocId());
rev3Properties.put("_rev", rev3.getRevId());
rev3Properties.put("message", "hi again");
rev3.setProperties(rev3Properties);
List<String> rev3History = Arrays.asList(rev3.getRevId(), "2-two", rev.getRevId());
listener = new Database.ChangeListener() {
@Override
public void changed(Database.ChangeEvent event) {
assertEquals(1, event.getChanges().size());
DocumentChange change = event.getChanges().get(0);
assertEquals(DOCUMENT_ID, change.getDocumentId());
assertEquals(rev3.getRevId(), change.getRevisionId());
assertTrue(change.isCurrentRevision());
// assertFalse(change.isConflict()); commented - currently failing. see https://github.com/couchbase/couchbase-lite-android-core/pull/27
Document doc = database.getDocument(change.getDocumentId());
assertEquals(rev3.getRevId(), doc.getCurrentRevisionId());
try {
assertEquals(3, doc.getRevisionHistory().size());
} catch (CouchbaseLiteException ex) {
fail("CouchbaseLiteException in change listener: " + ex.toString());
}
}
};
database.addChangeListener(listener);
database.forceInsert(rev3, rev3History, null);
database.removeChangeListener(listener);
// add a conflicting revision, with the same history length as the last revision we
// inserted. Since this new revision's revID has a higher ASCII sort, it should become the
// new winning revision.
final RevisionInternal conflictRev = new RevisionInternal(DOCUMENT_ID, "3-winner", false, database);
Map<String, Object> conflictProperties = new HashMap<String, Object>();
conflictProperties.put("_id", conflictRev.getDocId());
conflictProperties.put("_rev", conflictRev.getRevId());
conflictProperties.put("message", "winner");
conflictRev.setProperties(conflictProperties);
List<String> conflictRevHistory = Arrays.asList(conflictRev.getRevId(), "2-two", rev.getRevId());
listener = new Database.ChangeListener() {
@Override
public void changed(Database.ChangeEvent event) {
assertEquals(1, event.getChanges().size());
DocumentChange change = event.getChanges().get(0);
assertEquals(DOCUMENT_ID, change.getDocumentId());
assertEquals(conflictRev.getRevId(), change.getRevisionId());
assertTrue(change.isCurrentRevision());
assertTrue(change.isConflict());
Document doc = database.getDocument(change.getDocumentId());
assertEquals(conflictRev.getRevId(), doc.getCurrentRevisionId());
try {
assertEquals(2, doc.getConflictingRevisions().size());
assertEquals(3, doc.getRevisionHistory().size());
} catch (CouchbaseLiteException ex) {
fail("CouchbaseLiteException in change listener: " + ex.toString());
}
}
};
database.addChangeListener(listener);
database.forceInsert(conflictRev, conflictRevHistory, null);
database.removeChangeListener(listener);
}
| public void testRevTreeChangeNotifications() throws CouchbaseLiteException {
final String DOCUMENT_ID = "MyDocId";
// add a document with a single (first) revision
final RevisionInternal rev = new RevisionInternal(DOCUMENT_ID, "1-one", false, database);
Map<String, Object> revProperties = new HashMap<String, Object>();
revProperties.put("_id", rev.getDocId());
revProperties.put("_rev", rev.getRevId());
revProperties.put("message", "hi");
rev.setProperties(revProperties);
List<String> revHistory = Arrays.asList(rev.getRevId());
Database.ChangeListener listener = new Database.ChangeListener() {
@Override
public void changed(Database.ChangeEvent event) {
assertEquals(1, event.getChanges().size());
DocumentChange change = event.getChanges().get(0);
assertEquals(DOCUMENT_ID, change.getDocumentId());
assertEquals(rev.getRevId(), change.getRevisionId());
assertTrue(change.isCurrentRevision());
assertFalse(change.isConflict());
SavedRevision current = database.getDocument(change.getDocumentId()).getCurrentRevision();
assertEquals(rev.getRevId(), current.getId());
}
};
database.addChangeListener(listener);
database.forceInsert(rev, revHistory, null);
database.removeChangeListener(listener);
// add two more revisions to the document
final RevisionInternal rev3 = new RevisionInternal(DOCUMENT_ID, "3-three", false, database);
Map<String, Object> rev3Properties = new HashMap<String, Object>();
rev3Properties.put("_id", rev3.getDocId());
rev3Properties.put("_rev", rev3.getRevId());
rev3Properties.put("message", "hi again");
rev3.setProperties(rev3Properties);
List<String> rev3History = Arrays.asList(rev3.getRevId(), "2-two", rev.getRevId());
listener = new Database.ChangeListener() {
@Override
public void changed(Database.ChangeEvent event) {
assertEquals(1, event.getChanges().size());
DocumentChange change = event.getChanges().get(0);
assertEquals(DOCUMENT_ID, change.getDocumentId());
assertEquals(rev3.getRevId(), change.getRevisionId());
assertTrue(change.isCurrentRevision());
assertFalse(change.isConflict());
Document doc = database.getDocument(change.getDocumentId());
assertEquals(rev3.getRevId(), doc.getCurrentRevisionId());
try {
assertEquals(3, doc.getRevisionHistory().size());
} catch (CouchbaseLiteException ex) {
fail("CouchbaseLiteException in change listener: " + ex.toString());
}
}
};
database.addChangeListener(listener);
database.forceInsert(rev3, rev3History, null);
database.removeChangeListener(listener);
// add a conflicting revision, with the same history length as the last revision we
// inserted. Since this new revision's revID has a higher ASCII sort, it should become the
// new winning revision.
final RevisionInternal conflictRev = new RevisionInternal(DOCUMENT_ID, "3-winner", false, database);
Map<String, Object> conflictProperties = new HashMap<String, Object>();
conflictProperties.put("_id", conflictRev.getDocId());
conflictProperties.put("_rev", conflictRev.getRevId());
conflictProperties.put("message", "winner");
conflictRev.setProperties(conflictProperties);
List<String> conflictRevHistory = Arrays.asList(conflictRev.getRevId(), "2-two", rev.getRevId());
listener = new Database.ChangeListener() {
@Override
public void changed(Database.ChangeEvent event) {
assertEquals(1, event.getChanges().size());
DocumentChange change = event.getChanges().get(0);
assertEquals(DOCUMENT_ID, change.getDocumentId());
assertEquals(conflictRev.getRevId(), change.getRevisionId());
assertTrue(change.isCurrentRevision());
assertTrue(change.isConflict());
Document doc = database.getDocument(change.getDocumentId());
assertEquals(conflictRev.getRevId(), doc.getCurrentRevisionId());
try {
assertEquals(2, doc.getConflictingRevisions().size());
assertEquals(3, doc.getRevisionHistory().size());
} catch (CouchbaseLiteException ex) {
fail("CouchbaseLiteException in change listener: " + ex.toString());
}
}
};
database.addChangeListener(listener);
database.forceInsert(conflictRev, conflictRevHistory, null);
database.removeChangeListener(listener);
}
|
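Re-enabling assertFalse(change.isConflict()) is sound here because inserting 3-three merely extends the existing 1-one to 2-two branch, leaving the revision tree with a single leaf; only the later 3-winner insert adds a second leaf and therefore a real conflict, which the test asserts a few lines further down. A toy restatement of the leaf-count rule (not the library's implementation):

import java.util.List;

final class ConflictSketch {

    // A document is in conflict only when its revision tree has more than
    // one live leaf; each branch here contributes exactly one leaf.
    static boolean isConflict(List<List<String>> branches) {
        return branches.size() > 1;
    }

    public static void main(String[] args) {
        // Inserting 3-three on top of 1-one/2-two extends one branch: no conflict.
        System.out.println(isConflict(List.of(
                List.of("1-one", "2-two", "3-three"))));                 // false
        // Also inserting 3-winner with the same ancestry adds a second leaf: conflict.
        System.out.println(isConflict(List.of(
                List.of("1-one", "2-two", "3-three"),
                List.of("1-one", "2-two", "3-winner"))));                // true
    }
}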
diff --git a/org/nyet/ecuxplot/ECUxPlot.java b/org/nyet/ecuxplot/ECUxPlot.java
index 12f4cb4..9019239 100644
--- a/org/nyet/ecuxplot/ECUxPlot.java
+++ b/org/nyet/ecuxplot/ECUxPlot.java
@@ -1,667 +1,680 @@
package org.nyet.ecuxplot;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.util.*;
import java.util.prefs.Preferences;
import java.awt.Color;
import java.awt.Point;
import java.awt.event.ActionEvent;
import javax.swing.*;
import com.apple.eawt.*;
import org.jfree.chart.JFreeChart;
import org.jfree.chart.plot.XYPlot;
import org.jfree.data.time.Month;
import org.jfree.data.xy.DefaultXYDataset;
import org.jfree.data.xy.XYSeries;
import org.jfree.ui.ApplicationFrame;
import org.jfree.ui.RefineryUtilities;
import org.nyet.util.*;
import org.nyet.logfile.Dataset;
public class ECUxPlot extends ApplicationFrame implements SubActionListener {
// each file loaded has an associated dataset
private TreeMap<String, ECUxDataset> fileDatasets =
new TreeMap<String, ECUxDataset>();
private ECUxPresets presets = new ECUxPresets();
private ECUxChartPanel chartPanel;
private FATSChartFrame fatsFrame;
// Menus
private JMenuBar menuBar;
private AxisMenu xAxis;
private AxisMenu yAxis[] = new AxisMenu[2];
// Dialog boxes
private JFileChooser fc;
private FilterEditor fe;
private ConstantsEditor ce;
private PIDEditor pe;
private FuelingEditor fle;
private SAEEditor sae;
// Preferences
private Preferences prefs=null;
private Env env;
private Filter filter;
private boolean exitOnClose = true;
// Constructor
public ECUxPlot(final String title, boolean exitOnClose) { this(title, null, exitOnClose); }
public ECUxPlot(final String title, java.awt.Dimension size, boolean exitOnClose) {
super(title);
this.exitOnClose=exitOnClose;
WindowUtilities.setNativeLookAndFeel();
this.menuBar = new JMenuBar();
this.prefs = Preferences.userNodeForPackage(ECUxPlot.class);
this.filter = new Filter(this.prefs);
this.env = new Env(this.prefs);
java.net.URL imageURL =
getClass().getResource("icons/ECUxPlot2-64.png");
this.setIconImage(new javax.swing.ImageIcon(imageURL).getImage());
FileMenu filemenu = new FileMenu("File", this);
this.menuBar.add(filemenu);
OptionsMenu optmenu = new OptionsMenu("Options", this);
this.menuBar.add(optmenu);
this.menuBar.add(Box.createHorizontalGlue());
HelpMenu helpMenu = new HelpMenu("Help", this);
this.menuBar.add(helpMenu);
setJMenuBar(this.menuBar);
setPreferredSize(size!=null?size:this.windowSize());
}
public static boolean scatter(Preferences prefs) {
return prefs.getBoolean("scatter", false);
}
private boolean scatter() {
return this.scatter(this.prefs);
}
private Comparable xkey() {
final Comparable defaultXkey = this.presets.get("Power").xkey;
return this.prefs.get("xkey", defaultXkey.toString());
}
private Comparable[] ykeys(int index) {
final Comparable[] ykeys = this.presets.get("Power").ykeys;
final Comparable[] ykeys2 = this.presets.get("Power").ykeys2;
final String[] defaultYkeys = { Strings.join(",", ykeys), Strings.join(",", ykeys2) };
String k=this.prefs.get("ykeys"+index, defaultYkeys[index]);
return k.split(",");
}
private java.awt.Dimension windowSize() {
return new java.awt.Dimension(
this.prefs.getInt("windowWidth", 800),
this.prefs.getInt("windowHeight", 600));
}
private void prefsPutWindowSize() {
this.prefs.putInt("windowWidth", this.getWidth());
this.prefs.putInt("windowHeight", this.getHeight());
}
private void prefsPutXkey(Comparable xkey) {
this.prefs.put("xkey", xkey.toString());
}
private void prefsPutYkeys(int axis, Comparable [] ykeys) {
this.prefs.put("ykeys"+axis,Strings.join(",",ykeys));
}
private void prefsPutYkeys(int axis) {
final XYPlot plot = this.chartPanel.getChart().getXYPlot();
DefaultXYDataset dataset = (DefaultXYDataset)plot.getDataset(axis);
this.prefsPutYkeys(axis, ECUxChartFactory.getDatasetYkeys(dataset));
}
private void addChartYFromPrefs() {
this.addChartYFromPrefs(0);
this.addChartYFromPrefs(1);
updatePlotTitleAndYAxisLabels();
}
private void addChartYFromPrefs(int axis) {
this.addChartY(this.ykeys(axis), axis);
}
private void fileDatasetsChanged() {
// set title
this.setTitle("ECUxPlot " + Strings.join(", ", fileDatasets.keySet()));
// xaxis label depends on units found in files
updateXAxisLabel();
// Add all the data we just finished loading from the files
addChartYFromPrefs();
// merge headers using a TreeSet - only add new headers
// note that TreeSet keeps us sorted!
TreeSet<String> hset = new TreeSet<String>();
for(ECUxDataset d : this.fileDatasets.values()) {
for(String s : d.getIds())
if(s!=null) hset.add(s);
}
String [] headers = hset.toArray(new String[0]);
if(headers.length<=0) return;
// rebuild the axis menus
if(this.xAxis!=null) this.menuBar.remove(this.xAxis);
if(this.yAxis[0]!=null) this.menuBar.remove(this.yAxis[0]);
if(this.yAxis[1]!=null) this.menuBar.remove(this.yAxis[1]);
this.xAxis = new AxisMenu("X Axis", headers, this, true, this.xkey());
this.menuBar.add(xAxis, 2);
this.yAxis[0] = new AxisMenu("Y Axis", headers, this, false,
this.ykeys(0));
this.menuBar.add(yAxis[0], 3);
this.yAxis[1] = new AxisMenu("Y Axis2", headers, this, false,
this.ykeys(1));
this.menuBar.add(yAxis[1], 4);
// hide/unhide filenames in the legend
final XYPlot plot = this.chartPanel.getChart().getXYPlot();
for(int axis=0;axis<plot.getDatasetCount();axis++) {
org.jfree.data.xy.XYDataset pds = plot.getDataset(axis);
for(int series=0;series<pds.getSeriesCount();series++) {
Dataset.Key ykey = (Dataset.Key)pds.getSeriesKey(series);
if(this.fileDatasets.size()==1) ykey.hideFilename();
else ykey.showFilename();
}
}
}
public void loadFiles(ArrayList<String> files) {
WaitCursor.startWaitCursor(this);
for(String s : files) {
if(s.length()>0) _loadFile(new File(s), false);
}
fileDatasetsChanged();
WaitCursor.stopWaitCursor(this);
}
private void loadFile(File file) { loadFile(file, false); }
private void loadFile(File file, Boolean replace) {
WaitCursor.startWaitCursor(this);
_loadFile(file, replace);
fileDatasetsChanged();
WaitCursor.stopWaitCursor(this);
}
private void _loadFile(File file, Boolean replace) {
try {
ECUxDataset data = new ECUxDataset(file.getAbsolutePath(),
this.env, this.filter);
// replacing, nuke all the currently loaded datasets
if(replace) {
this.fileDatasets = new TreeMap<String, ECUxDataset>();
if(this.fatsFrame!=null)
this.fatsFrame.clearDataset();
}
if(this.chartPanel == null) {
final JFreeChart chart =
ECUxChartFactory.create2AxisChart(this.scatter());
this.chartPanel = new ECUxChartPanel(chart);
setContentPane(this.chartPanel);
}
this.fileDatasets.put(file.getName(), data);
} catch (Exception e) {
JOptionPane.showMessageDialog(this, e);
e.printStackTrace();
return;
}
}
public void setMyVisible(Boolean b) {
if(this.fatsFrame!=null) {
if(!this.filter.enabled()) b=false;
if(b!=this.fatsFrame.isShowing())
this.fatsFrame.setVisible(b);
}
super.setVisible(b);
}
public void actionPerformed(ActionEvent event) {
AbstractButton source = (AbstractButton) (event.getSource());
if(source.getText().equals("Quit")) {
exitApp();
} else if(source.getText().equals("Export Chart")) {
if(this.chartPanel == null) {
JOptionPane.showMessageDialog(this, "Open a CSV first");
} else {
try {
String stem=null;
for(ECUxDataset d : this.fileDatasets.values()) {
String fname=d.getFileId();
if(stem == null) {
stem = Files.stem(fname);
} else {
stem += "_vs_" + Files.stem(Files.filename(fname));
}
}
this.chartPanel.doSaveAs(stem);
} catch (Exception e) {
JOptionPane.showMessageDialog(this, e);
e.printStackTrace();
}
}
} else if(source.getText().equals("Clear Chart")) {
// nuke axis menus
- this.menuBar.remove(this.xAxis);
- this.menuBar.remove(this.yAxis[0]);
- this.menuBar.remove(this.yAxis[1]);
+ if(this.menuBar!=null) {
+ if(this.xAxis!=null)
+ this.menuBar.remove(this.xAxis);
+ if(this.yAxis!=null) {
+ if(this.yAxis[0]!=null)
+ this.menuBar.remove(this.yAxis[0]);
+ if(this.yAxis[1]!=null)
+ this.menuBar.remove(this.yAxis[1]);
+ }
+ }
this.xAxis = null;
this.yAxis = new AxisMenu[2];
// nuke datasets
this.fileDatasets = new TreeMap<String, ECUxDataset>();
this.setTitle("ECUxPlot");
- this.chartPanel.setChart(null);
+ if(this.chartPanel!=null) {
+ this.chartPanel.setChart(null);
+ this.chartPanel.removeAll();
+ this.chartPanel=null;
+ }
+ if(this.fatsFrame!=null)
+ this.fatsFrame.clearDataset();
} else if(source.getText().equals("Close Chart")) {
this.dispose();
} else if(source.getText().equals("New Chart")) {
// do not exit if this child plot is closed
final ECUxPlot plot = new ECUxPlot("ECUxPlot", false);
plot.pack();
Point where = this.getLocation();
where.translate(20,20);
plot.setLocation(where);
plot.setMyVisible(true);
} else if(source.getText().equals("Open File") ||
source.getText().equals("Add File") ) {
if(fc==null) {
// current working dir
// String dir = System.getProperty("user.dir");
// home dir
String dir = this.prefs.get("chooserDir",
System.getProperty("user.home"));
fc = new JFileChooser(dir);
fc.setFileFilter(new GenericFileFilter("csv", "CSV File"));
}
int ret = fc.showOpenDialog(this);
if(ret == JFileChooser.APPROVE_OPTION) {
boolean replace = source.getText().equals("Open File");
WaitCursor.startWaitCursor(this);
loadFile(fc.getSelectedFile(), replace);
// if somebody hid the fats frame, let's unhide it for them.
setMyVisible(true);
WaitCursor.stopWaitCursor(this);
this.prefs.put("chooserDir",
fc.getCurrentDirectory().toString());
}
} else if(source.getText().equals("Scatter plot")) {
boolean s = source.isSelected();
this.prefs.putBoolean("scatter", s);
if(this.chartPanel != null)
ECUxChartFactory.setChartStyle(this.chartPanel.getChart(),
!s, s);
} else if(source.getText().equals("Filter data")) {
this.filter.enabled(source.isSelected());
rebuild();
} else if(source.getText().equals("Configure filter...")) {
if(this.fe == null) this.fe =
new FilterEditor(this.prefs, this.filter);
this.fe.showDialog(this, "Filter");
} else if(source.getText().equals("Edit constants...")) {
if(this.ce == null) this.ce =
new ConstantsEditor(this.prefs, this.env.c);
this.ce.showDialog(this, "Constants");
} else if(source.getText().equals("Edit fueling...")) {
if(this.fle == null) this.fle =
new FuelingEditor(this.prefs, this.env.f);
this.fle.showDialog(this, "Fueling");
} else if(source.getText().equals("Edit PID...")) {
if(this.pe == null) this.pe = new PIDEditor(this.env.pid);
this.pe.showDialog(this, "PID");
} else if(source.getText().equals("Apply SAE")) {
this.env.sae.enabled(source.isSelected());
rebuild();
updatePlotTitleAndYAxisLabels();
} else if(source.getText().equals("Edit SAE constants...")) {
if(this.sae == null) this.sae = new SAEEditor(this.prefs, this.env.sae);
this.sae.showDialog(this, "SAE");
} else if(source.getText().equals("About...")) {
JOptionPane.showMessageDialog(this, new AboutPanel(),
"About ECUxPlot", JOptionPane.PLAIN_MESSAGE);
} else {
JOptionPane.showMessageDialog(this,
"unhandled getText=" + source.getText() +
", actionCommand=" + event.getActionCommand());
}
}
private String findUnits(Comparable key) {
ArrayList<String> units = new ArrayList<String>();
for(ECUxDataset d : this.fileDatasets.values()) {
String u = d.units(key);
if(u==null || u.length()==0) continue;
if(!units.contains(u)) units.add(u);
}
return Strings.join(",", units);
}
private void updatePlotTitleAndYAxisLabels() {
if(this.chartPanel!=null)
updatePlotTitleAndYAxisLabels(
this.chartPanel.getChart().getXYPlot());
}
private void updatePlotTitleAndYAxisLabels(XYPlot plot) {
ArrayList<String> title = new ArrayList<String>();
for(int axis=0; axis<plot.getDatasetCount(); axis++) {
ArrayList<String> seriesTitle = new ArrayList<String>();
ArrayList<String> label= new ArrayList<String>();
final org.jfree.data.xy.XYDataset dataset = plot.getDataset(axis);
if(dataset!=null) {
for(int series=0; series<dataset.getSeriesCount(); series++) {
Comparable key = dataset.getSeriesKey(series);
if(key==null) continue;
String s;
if(key instanceof Dataset.Key)
s = ((Dataset.Key)key).getString();
else
s = key.toString();
// construct title array
if(!seriesTitle.contains(s)) seriesTitle.add(s);
// construct y axis label array
String l = findUnits(key);
if(l==null || l.length()==0) continue;
if(!label.contains(l)) label.add(l);
}
}
if(seriesTitle.size()>0)
title.add(Strings.join(", ", seriesTitle));
plot.getRangeAxis(axis).setLabel(Strings.join(",",label));
// hide axis if this axis has no series
plot.getRangeAxis(axis).setVisible(dataset != null && dataset.getSeriesCount() > 0);
}
this.chartPanel.getChart().setTitle(Strings.join(" and ", title));
}
private void updateXAxisLabel() {
if(this.chartPanel!=null)
updateXAxisLabel(this.chartPanel.getChart().getXYPlot());
}
private void updateXAxisLabel(XYPlot plot) {
// find x axis label. just pick first one that has units we can use
String label = "";
for (ECUxDataset data : this.fileDatasets.values()) {
if(data.get(this.xkey())!=null) {
String units = data.units(this.xkey());
if(units != null) {
label = this.xkey().toString();
if(label.indexOf(units)==-1)
label += " ("+units+")";
break;
}
}
}
plot.getDomainAxis().setLabel(label);
}
private void addDataset(int axis, DefaultXYDataset dataset,
Dataset.Key ykey) {
ECUxDataset data = this.fileDatasets.get(ykey.getFilename());
ECUxChartFactory.addDataset(dataset, data,
this.xkey(), ykey);
ECUxChartFactory.setAxisStroke(this.chartPanel.getChart(), axis,
new java.awt.BasicStroke(1.5f));
}
public void rebuild() {
if(this.chartPanel==null) return;
WaitCursor.startWaitCursor(this);
for(ECUxDataset data : this.fileDatasets.values())
data.buildRanges();
if(this.fatsFrame==null) {
FATSChartFrame frame =
FATSChartFrame.createFATSChartFrame(this.fileDatasets,
this);
frame.pack();
java.net.URL imageURL =
getClass().getResource("icons/ECUxPlot2-64.png");
frame.setIconImage(new
javax.swing.ImageIcon(imageURL).getImage());
this.fatsFrame = frame;
} else {
this.fatsFrame.setDatasets(this.fileDatasets);
}
final XYPlot plot = this.chartPanel.getChart().getXYPlot();
for(int axis=0;axis<plot.getDatasetCount();axis++) {
org.jfree.data.xy.XYDataset pds = plot.getDataset(axis);
final DefaultXYDataset newdataset = new DefaultXYDataset();
for(int series=0;series<pds.getSeriesCount();series++) {
Dataset.Key ykey = (Dataset.Key)pds.getSeriesKey(series);
addDataset(axis, newdataset, ykey);
}
plot.setDataset(axis, newdataset);
}
updateXAxisLabel(plot);
WaitCursor.stopWaitCursor(this);
}
private void removeAllY() { this.removeAllY(0); this.removeAllY(1); }
private void removeAllY(int axis) {
final XYPlot plot = this.chartPanel.getChart().getXYPlot();
ECUxChartFactory.removeDataset((DefaultXYDataset)plot.getDataset(axis));
this.yAxis[axis].uncheckAll();
}
private void editChartY(Comparable ykey, int axis, boolean add) {
for(ECUxDataset data : this.fileDatasets.values())
editChartY(data, ykey, axis, add);
}
private void editChartY(ECUxDataset data, Comparable ykey, int axis,
boolean add) {
if(add && !(data.exists(ykey)) )
return;
final XYPlot plot = this.chartPanel.getChart().getXYPlot();
DefaultXYDataset pds = (DefaultXYDataset)plot.getDataset(axis);
if(add) {
Dataset.Key key = data.new Key(data.getFileId(),
ykey.toString());
if(this.fileDatasets.size()==1) key.hideFilename();
addDataset(axis, pds, key);
} else {
ECUxChartFactory.removeDataset(pds, ykey);
}
}
private void addChartY(Comparable[] ykey, int axis) {
for(Comparable k : ykey)
editChartY(k, axis, true);
}
public void savePreset(Comparable name) {
final Preset p = new Preset(name, this.xkey(),
this.ykeys(0), this.ykeys(1), this.scatter());
this.presets.put(name, p);
}
public void loadPreset(Comparable name) {
final Preset p = this.presets.get(name);
if(p!=null) loadPreset(p);
}
private void loadPreset(Preset p) {
if(this.chartPanel==null) return;
// get rid of everything
removeAllY();
prefsPutXkey(p.xkey);
// updateXAxisLabel depends on xkey prefs
updateXAxisLabel();
prefsPutYkeys(0,p.ykeys);
prefsPutYkeys(1,p.ykeys2);
// addChart depends on the xkey,ykeys put in prefs
addChartYFromPrefs();
// set up scatter depending on preset
final boolean s = p.scatter;
ECUxChartFactory.setChartStyle(this.chartPanel.getChart(), !s, s);
this.prefs.putBoolean("scatter", s);
}
public void actionPerformed(ActionEvent event, Comparable parentId) {
AbstractButton source = (AbstractButton) (event.getSource());
// System.out.println(source.getText() + ":" + parentId);
if(parentId.equals("X Axis")) {
prefsPutXkey(source.getText());
// rebuild depends on the value of prefs
rebuild();
} else if(parentId.equals("Y Axis")) {
if(source.getText().equals("Remove all")) removeAllY(0);
else editChartY(source.getText(),0,source.isSelected());
// prefsPutYkeys depends on the stuff that edit chart does
prefsPutYkeys(0);
} else if(parentId.equals("Y Axis2")) {
if(source.getText().equals("Remove all")) removeAllY(1);
else editChartY(source.getText(),1,source.isSelected());
// prefsPutYkeys depends on the stuff that edit chart does
prefsPutYkeys(1);
}
updatePlotTitleAndYAxisLabels();
}
public void windowClosing(java.awt.event.WindowEvent we) {
if(exitOnClose) exitApp();
}
private void exitApp() {
this.prefsPutWindowSize();
if(this.fatsFrame!=null) this.fatsFrame.dispose();
System.exit(0);
}
private static class Options {
public String preset = null;
public File output = null;
public java.awt.Dimension size = null;
public ArrayList<String> files = new ArrayList<String>();
public Options(String[] args) {
int width = -1, height = -1;
for(int i=0; i<args.length; i++) {
if(args[i].charAt(0) == '-') {
// only consume args[i+1] when the flag actually takes a value, so
// options like "-l" are recognized even when they are not last
if(i<args.length-1) {
if(args[i].equals("-p"))
this.preset = args[++i];
else if(args[i].equals("-o"))
this.output = new File(args[++i]);
else if(args[i].equals("-w"))
width = Integer.parseInt(args[++i]);
else if(args[i].equals("-h"))
height = Integer.parseInt(args[++i]);
}
if(args[i].equals("-l")) {
final ECUxPresets p = new ECUxPresets();
for(String s : p.keySet())
System.out.println(s);
System.exit(0);
}
if(args[i].equals("-h")) {
final ECUxPresets p = new ECUxPresets();
System.out.println(
"usage: ECUxPlot [-p Preset] [-o OutputFile] " +
"[-w width] [-h height] [LogFiles ... ]");
System.out.println(" ECUxPlot -l (list presets)");
System.out.println(" ECUxPlot -? (show usage)");
System.exit(0);
}
} else this.files.add(args[i]);
}
if(width>0 && height>0)
this.size = new java.awt.Dimension(width, height);
}
}
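// Illustrative invocations (file names are hypothetical):
//   ECUxPlot -p Power -w 1024 -h 768 pull1.csv pull2.csv
//   ECUxPlot -l   (list presets and exit)
//   ECUxPlot -?   (print usage and exit)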
public static void main(final String[] args) {
javax.swing.SwingUtilities.invokeLater(new Runnable() {
public void run() {
final Options o = new Options(args);
// exit on close
final ECUxPlot plot = new ECUxPlot("ECUxPlot", o.size, true);
final Application app = Application.getApplication();
if(app!=null) {
app.addApplicationListener(new ApplicationAdapter() {
public void handleOpenFile(ApplicationEvent evt) {
final String file = evt.getFilename();
plot.loadFile(new File(file));
}
public void handleQuit(ApplicationEvent evt) {
plot.prefsPutWindowSize();
evt.setHandled(true);
}
});
}
plot.pack();
RefineryUtilities.centerFrameOnScreen(plot);
plot.loadFiles(o.files);
if(o.preset!=null)
plot.loadPreset(o.preset);
if(o.output!=null) {
try {
plot.pack();
plot.chartPanel.saveChartAsPNG(o.output);
System.exit(0);
} catch (IOException e) {
e.printStackTrace();
}
}
plot.setMyVisible(true);
}
});
}
public Preferences getPreferences() { return this.prefs; }
public TreeMap<Comparable, Preset> getPresets() { return this.presets; }
}
| false | true | public void actionPerformed(ActionEvent event) {
AbstractButton source = (AbstractButton) (event.getSource());
if(source.getText().equals("Quit")) {
exitApp();
} else if(source.getText().equals("Export Chart")) {
if(this.chartPanel == null) {
JOptionPane.showMessageDialog(this, "Open a CSV first");
} else {
try {
String stem=null;
for(ECUxDataset d : this.fileDatasets.values()) {
String fname=d.getFileId();
if(stem == null) {
stem = Files.stem(fname);
} else {
stem += "_vs_" + Files.stem(Files.filename(fname));
}
}
this.chartPanel.doSaveAs(stem);
} catch (Exception e) {
JOptionPane.showMessageDialog(this, e);
e.printStackTrace();
}
}
} else if(source.getText().equals("Clear Chart")) {
// nuke axis menus
this.menuBar.remove(this.xAxis);
this.menuBar.remove(this.yAxis[0]);
this.menuBar.remove(this.yAxis[1]);
this.xAxis = null;
this.yAxis = new AxisMenu[2];
// nuke datasets
this.fileDatasets = new TreeMap<String, ECUxDataset>();
this.setTitle("ECUxPlot");
this.chartPanel.setChart(null);
} else if(source.getText().equals("Close Chart")) {
this.dispose();
} else if(source.getText().equals("New Chart")) {
// do not exit if this child plot is closed
final ECUxPlot plot = new ECUxPlot("ECUxPlot", false);
plot.pack();
Point where = this.getLocation();
where.translate(20,20);
plot.setLocation(where);
plot.setMyVisible(true);
} else if(source.getText().equals("Open File") ||
source.getText().equals("Add File") ) {
if(fc==null) {
// current working dir
// String dir = System.getProperty("user.dir");
// home dir
String dir = this.prefs.get("chooserDir",
System.getProperty("user.home"));
fc = new JFileChooser(dir);
fc.setFileFilter(new GenericFileFilter("csv", "CSV File"));
}
int ret = fc.showOpenDialog(this);
if(ret == JFileChooser.APPROVE_OPTION) {
boolean replace = source.getText().equals("Open File");
WaitCursor.startWaitCursor(this);
loadFile(fc.getSelectedFile(), replace);
// if somebody hid the fats frame, let's unhide it for them.
setMyVisible(true);
WaitCursor.stopWaitCursor(this);
this.prefs.put("chooserDir",
fc.getCurrentDirectory().toString());
}
} else if(source.getText().equals("Scatter plot")) {
boolean s = source.isSelected();
this.prefs.putBoolean("scatter", s);
if(this.chartPanel != null)
ECUxChartFactory.setChartStyle(this.chartPanel.getChart(),
!s, s);
} else if(source.getText().equals("Filter data")) {
this.filter.enabled(source.isSelected());
rebuild();
} else if(source.getText().equals("Configure filter...")) {
if(this.fe == null) this.fe =
new FilterEditor(this.prefs, this.filter);
this.fe.showDialog(this, "Filter");
} else if(source.getText().equals("Edit constants...")) {
if(this.ce == null) this.ce =
new ConstantsEditor(this.prefs, this.env.c);
this.ce.showDialog(this, "Constants");
} else if(source.getText().equals("Edit fueling...")) {
if(this.fle == null) this.fle =
new FuelingEditor(this.prefs, this.env.f);
this.fle.showDialog(this, "Fueling");
} else if(source.getText().equals("Edit PID...")) {
if(this.pe == null) this.pe = new PIDEditor(this.env.pid);
this.pe.showDialog(this, "PID");
} else if(source.getText().equals("Apply SAE")) {
this.env.sae.enabled(source.isSelected());
rebuild();
updatePlotTitleAndYAxisLabels();
} else if(source.getText().equals("Edit SAE constants...")) {
if(this.sae == null) this.sae = new SAEEditor(this.prefs, this.env.sae);
this.sae.showDialog(this, "SAE");
} else if(source.getText().equals("About...")) {
JOptionPane.showMessageDialog(this, new AboutPanel(),
"About ECUxPlot", JOptionPane.PLAIN_MESSAGE);
} else {
JOptionPane.showMessageDialog(this,
"unhandled getText=" + source.getText() +
", actionCommand=" + event.getActionCommand());
}
}
| public void actionPerformed(ActionEvent event) {
AbstractButton source = (AbstractButton) (event.getSource());
if(source.getText().equals("Quit")) {
exitApp();
} else if(source.getText().equals("Export Chart")) {
if(this.chartPanel == null) {
JOptionPane.showMessageDialog(this, "Open a CSV first");
} else {
try {
String stem=null;
for(ECUxDataset d : this.fileDatasets.values()) {
String fname=d.getFileId();
if(stem == null) {
stem = Files.stem(fname);
} else {
stem += "_vs_" + Files.stem(Files.filename(fname));
}
}
this.chartPanel.doSaveAs(stem);
} catch (Exception e) {
JOptionPane.showMessageDialog(this, e);
e.printStackTrace();
}
}
} else if(source.getText().equals("Clear Chart")) {
// nuke axis menus
if(this.menuBar!=null) {
if(this.xAxis!=null)
this.menuBar.remove(this.xAxis);
if(this.yAxis!=null) {
if(this.yAxis[0]!=null)
this.menuBar.remove(this.yAxis[0]);
if(this.yAxis[1]!=null)
this.menuBar.remove(this.yAxis[1]);
}
}
this.xAxis = null;
this.yAxis = new AxisMenu[2];
// nuke datasets
this.fileDatasets = new TreeMap<String, ECUxDataset>();
this.setTitle("ECUxPlot");
if(this.chartPanel!=null) {
this.chartPanel.setChart(null);
this.chartPanel.removeAll();
this.chartPanel=null;
}
if(this.fatsFrame!=null)
this.fatsFrame.clearDataset();
} else if(source.getText().equals("Close Chart")) {
this.dispose();
} else if(source.getText().equals("New Chart")) {
// do not exit if this child plot is closed
final ECUxPlot plot = new ECUxPlot("ECUxPlot", false);
plot.pack();
Point where = this.getLocation();
where.translate(20,20);
plot.setLocation(where);
plot.setMyVisible(true);
} else if(source.getText().equals("Open File") ||
source.getText().equals("Add File") ) {
if(fc==null) {
// current working dir
// String dir = System.getProperty("user.dir");
// home dir
String dir = this.prefs.get("chooserDir",
System.getProperty("user.home"));
fc = new JFileChooser(dir);
fc.setFileFilter(new GenericFileFilter("csv", "CSV File"));
}
int ret = fc.showOpenDialog(this);
if(ret == JFileChooser.APPROVE_OPTION) {
boolean replace = source.getText().equals("Open File");
WaitCursor.startWaitCursor(this);
loadFile(fc.getSelectedFile(), replace);
// if somebody hid the fats frame, let's unhide it for them.
setMyVisible(true);
WaitCursor.stopWaitCursor(this);
this.prefs.put("chooserDir",
fc.getCurrentDirectory().toString());
}
} else if(source.getText().equals("Scatter plot")) {
boolean s = source.isSelected();
this.prefs.putBoolean("scatter", s);
if(this.chartPanel != null)
ECUxChartFactory.setChartStyle(this.chartPanel.getChart(),
!s, s);
} else if(source.getText().equals("Filter data")) {
this.filter.enabled(source.isSelected());
rebuild();
} else if(source.getText().equals("Configure filter...")) {
if(this.fe == null) this.fe =
new FilterEditor(this.prefs, this.filter);
this.fe.showDialog(this, "Filter");
} else if(source.getText().equals("Edit constants...")) {
if(this.ce == null) this.ce =
new ConstantsEditor(this.prefs, this.env.c);
this.ce.showDialog(this, "Constants");
} else if(source.getText().equals("Edit fueling...")) {
if(this.fle == null) this.fle =
new FuelingEditor(this.prefs, this.env.f);
this.fle.showDialog(this, "Fueling");
} else if(source.getText().equals("Edit PID...")) {
if(this.pe == null) this.pe = new PIDEditor(this.env.pid);
this.pe.showDialog(this, "PID");
} else if(source.getText().equals("Apply SAE")) {
this.env.sae.enabled(source.isSelected());
rebuild();
updatePlotTitleAndYAxisLabels();
} else if(source.getText().equals("Edit SAE constants...")) {
if(this.sae == null) this.sae = new SAEEditor(this.prefs, this.env.sae);
this.sae.showDialog(this, "SAE");
} else if(source.getText().equals("About...")) {
JOptionPane.showMessageDialog(this, new AboutPanel(),
"About ECUxPlot", JOptionPane.PLAIN_MESSAGE);
} else {
JOptionPane.showMessageDialog(this,
"unhandled getText=" + source.getText() +
", actionCommand=" + event.getActionCommand());
}
}
|
diff --git a/webapp/src/edu/cornell/mannlib/vitro/webapp/controller/accounts/manageproxies/ProxyRelationshipSelectionCriteria.java b/webapp/src/edu/cornell/mannlib/vitro/webapp/controller/accounts/manageproxies/ProxyRelationshipSelectionCriteria.java
index d0d4f7d06..4f7389944 100644
--- a/webapp/src/edu/cornell/mannlib/vitro/webapp/controller/accounts/manageproxies/ProxyRelationshipSelectionCriteria.java
+++ b/webapp/src/edu/cornell/mannlib/vitro/webapp/controller/accounts/manageproxies/ProxyRelationshipSelectionCriteria.java
@@ -1,84 +1,84 @@
/* $This file is distributed under the terms of the license in /doc/license.txt$ */
package edu.cornell.mannlib.vitro.webapp.controller.accounts.manageproxies;
/**
* On what basis are we selecting proxy relationships?
*
* Are we viewing by Proxy or by Profile? What is the search term, if any? How
* many results per page, and what page are we on?
*
* Search terms are matched against last name combined with first name, of
* either UserAccount(Proxy) or Individual(Profile), depending on how we are
* listing. Searches are case-insensitive.
*/
public class ProxyRelationshipSelectionCriteria {
public static final int DEFAULT_RELATIONSHIPS_PER_PAGE = 20;
public static final ProxyRelationshipSelectionCriteria DEFAULT_CRITERIA = new ProxyRelationshipSelectionCriteria(
DEFAULT_RELATIONSHIPS_PER_PAGE, 1, ProxyRelationshipView.BY_PROXY,
"");
public enum ProxyRelationshipView {
BY_PROXY, BY_PROFILE;
public static ProxyRelationshipView DEFAULT_VIEW = BY_PROXY;
}
/** How many relationships should we bring back, at most? */
private final int relationshipsPerPage;
/** What page are we on? (1-origin) */
private final int pageIndex;
/** What view are we using? */
private final ProxyRelationshipView viewBy;
/** What term are we searching on, if any? */
private final String searchTerm;
public ProxyRelationshipSelectionCriteria(int relationshipsPerPage,
int pageIndex, ProxyRelationshipView viewBy, String searchTerm) {
if (relationshipsPerPage <= 0) {
throw new IllegalArgumentException("relationshipsPerPage "
+ "must be a positive integer, not " + relationshipsPerPage);
}
this.relationshipsPerPage = relationshipsPerPage;
if (pageIndex <= 0) {
throw new IllegalArgumentException("pageIndex must be a "
- + "non-negative integer, not " + pageIndex);
+ + "positive integer, not " + pageIndex);
}
this.pageIndex = pageIndex;
this.viewBy = nonNull(viewBy, ProxyRelationshipView.DEFAULT_VIEW);
this.searchTerm = nonNull(searchTerm, "");
}
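// Example (hypothetical values): the first page of profile-view
// results matching "smith", twenty relationships per page:
//   new ProxyRelationshipSelectionCriteria(20, 1,
//           ProxyRelationshipView.BY_PROFILE, "smith");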
public int getRelationshipsPerPage() {
return relationshipsPerPage;
}
public int getPageIndex() {
return pageIndex;
}
public ProxyRelationshipView getViewBy() {
return viewBy;
}
public String getSearchTerm() {
return searchTerm;
}
private <T> T nonNull(T t, T nullValue) {
return (t == null) ? nullValue : t;
}
@Override
public String toString() {
return "ProxyRelationshipSelectionCriteria[relationshipsPerPage="
+ relationshipsPerPage + ", pageIndex=" + pageIndex
+ ", viewBy=" + viewBy + "', searchTerm='" + searchTerm + "']";
}
}
| true | true | public ProxyRelationshipSelectionCriteria(int relationshipsPerPage,
int pageIndex, ProxyRelationshipView viewBy, String searchTerm) {
if (relationshipsPerPage <= 0) {
throw new IllegalArgumentException("relationshipsPerPage "
+ "must be a positive integer, not " + relationshipsPerPage);
}
this.relationshipsPerPage = relationshipsPerPage;
if (pageIndex <= 0) {
throw new IllegalArgumentException("pageIndex must be a "
+ "non-negative integer, not " + pageIndex);
}
this.pageIndex = pageIndex;
this.viewBy = nonNull(viewBy, ProxyRelationshipView.DEFAULT_VIEW);
this.searchTerm = nonNull(searchTerm, "");
}
| public ProxyRelationshipSelectionCriteria(int relationshipsPerPage,
int pageIndex, ProxyRelationshipView viewBy, String searchTerm) {
if (relationshipsPerPage <= 0) {
throw new IllegalArgumentException("relationshipsPerPage "
+ "must be a positive integer, not " + relationshipsPerPage);
}
this.relationshipsPerPage = relationshipsPerPage;
if (pageIndex <= 0) {
throw new IllegalArgumentException("pageIndex must be a "
+ "positive integer, not " + pageIndex);
}
this.pageIndex = pageIndex;
this.viewBy = nonNull(viewBy, ProxyRelationshipView.DEFAULT_VIEW);
this.searchTerm = nonNull(searchTerm, "");
}
|
diff --git a/src/com/android/gallery3d/app/PhotoPage.java b/src/com/android/gallery3d/app/PhotoPage.java
index c5c77e11b..b1736203c 100644
--- a/src/com/android/gallery3d/app/PhotoPage.java
+++ b/src/com/android/gallery3d/app/PhotoPage.java
@@ -1,1192 +1,1195 @@
/*
* Copyright (C) 2010 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.android.gallery3d.app;
import android.annotation.TargetApi;
import android.app.Activity;
import android.content.ActivityNotFoundException;
import android.content.ContentResolver;
import android.content.Context;
import android.content.Intent;
import android.content.pm.PackageManager;
import android.graphics.Rect;
import android.net.Uri;
import android.nfc.NfcAdapter;
import android.os.Bundle;
import android.os.Handler;
import android.os.Message;
import android.util.Log;
import android.view.animation.AccelerateInterpolator;
import android.widget.RelativeLayout;
import android.widget.Toast;
import com.actionbarsherlock.app.ActionBar.OnMenuVisibilityListener;
import com.actionbarsherlock.view.Menu;
import com.actionbarsherlock.view.MenuItem;
import com.android.gallery3d.R;
import com.android.gallery3d.anim.FloatAnimation;
import com.android.gallery3d.common.ApiHelper;
import com.android.gallery3d.common.Utils;
import com.android.gallery3d.data.DataManager;
import com.android.gallery3d.data.FilterDeleteSet;
import com.android.gallery3d.data.MediaDetails;
import com.android.gallery3d.data.MediaItem;
import com.android.gallery3d.data.MediaObject;
import com.android.gallery3d.data.MediaSet;
import com.android.gallery3d.data.MtpSource;
import com.android.gallery3d.data.Path;
import com.android.gallery3d.data.SecureAlbum;
import com.android.gallery3d.data.SecureSource;
import com.android.gallery3d.data.SnailAlbum;
import com.android.gallery3d.data.SnailItem;
import com.android.gallery3d.data.SnailSource;
import com.android.gallery3d.picasasource.PicasaSource;
import com.android.gallery3d.ui.AnimationTime;
import com.android.gallery3d.ui.BitmapScreenNail;
import com.android.gallery3d.ui.DetailsHelper;
import com.android.gallery3d.ui.DetailsHelper.CloseListener;
import com.android.gallery3d.ui.DetailsHelper.DetailsSource;
import com.android.gallery3d.ui.GLCanvas;
import com.android.gallery3d.ui.GLRoot;
import com.android.gallery3d.ui.GLRoot.OnGLIdleListener;
import com.android.gallery3d.ui.GLView;
import com.android.gallery3d.ui.ImportCompleteListener;
import com.android.gallery3d.ui.MenuExecutor;
import com.android.gallery3d.ui.PhotoFallbackEffect;
import com.android.gallery3d.ui.PhotoView;
import com.android.gallery3d.ui.PreparePageFadeoutTexture;
import com.android.gallery3d.ui.RawTexture;
import com.android.gallery3d.ui.SelectionManager;
import com.android.gallery3d.ui.SynchronizedHandler;
import com.android.gallery3d.util.GalleryUtils;
import com.android.gallery3d.util.LightCycleHelper;
import com.android.gallery3d.util.MediaSetUtils;
public class PhotoPage extends ActivityState implements
PhotoView.Listener, OrientationManager.Listener, AppBridge.Server,
PhotoPageBottomControls.Delegate {
private static final String TAG = "PhotoPage";
private static final int MSG_HIDE_BARS = 1;
private static final int MSG_LOCK_ORIENTATION = 2;
private static final int MSG_UNLOCK_ORIENTATION = 3;
private static final int MSG_ON_FULL_SCREEN_CHANGED = 4;
private static final int MSG_UPDATE_ACTION_BAR = 5;
private static final int MSG_UNFREEZE_GLROOT = 6;
private static final int MSG_WANT_BARS = 7;
private static final int MSG_REFRESH_GRID_BUTTON = 8;
private static final int MSG_REFRESH_BOTTOM_CONTROLS = 9;
private static final int HIDE_BARS_TIMEOUT = 3500;
private static final int UNFREEZE_GLROOT_TIMEOUT = 250;
private static final int REQUEST_SLIDESHOW = 1;
private static final int REQUEST_CROP = 2;
private static final int REQUEST_CROP_PICASA = 3;
private static final int REQUEST_EDIT = 4;
private static final int REQUEST_PLAY_VIDEO = 5;
private static final int REQUEST_TRIM = 6;
public static final String KEY_MEDIA_SET_PATH = "media-set-path";
public static final String KEY_MEDIA_ITEM_PATH = "media-item-path";
public static final String KEY_INDEX_HINT = "index-hint";
public static final String KEY_OPEN_ANIMATION_RECT = "open-animation-rect";
public static final String KEY_APP_BRIDGE = "app-bridge";
public static final String KEY_TREAT_BACK_AS_UP = "treat-back-as-up";
public static final String KEY_START_IN_FILMSTRIP = "start-in-filmstrip";
public static final String KEY_RETURN_INDEX_HINT = "return-index-hint";
public static final String KEY_SHOW_WHEN_LOCKED = "show_when_locked";
public static final String KEY_ALBUMPAGE_TRANSITION = "albumpage-transition";
public static final int MSG_ALBUMPAGE_NONE = 0;
public static final int MSG_ALBUMPAGE_STARTED = 1;
public static final int MSG_ALBUMPAGE_RESUMED = 2;
public static final int MSG_ALBUMPAGE_PICKED = 4;
public static final String ACTION_NEXTGEN_EDIT = "action_nextgen_edit";
private GalleryApp mApplication;
private SelectionManager mSelectionManager;
private PhotoView mPhotoView;
private PhotoPage.Model mModel;
private DetailsHelper mDetailsHelper;
private boolean mShowDetails;
// mMediaSet could be null if there is no KEY_MEDIA_SET_PATH supplied.
// E.g., viewing a photo in gmail attachment
private FilterDeleteSet mMediaSet;
// The mediaset used by camera launched from secure lock screen.
private SecureAlbum mSecureAlbum;
private int mCurrentIndex = 0;
private Handler mHandler;
private boolean mShowBars = true;
private volatile boolean mActionBarAllowed = true;
private GalleryActionBar mActionBar;
private boolean mIsMenuVisible;
private PhotoPageBottomControls mBottomControls;
private MediaItem mCurrentPhoto = null;
private MenuExecutor mMenuExecutor;
private boolean mIsActive;
private String mSetPathString;
// This is the original mSetPathString before adding the camera preview item.
private String mOriginalSetPathString;
private AppBridge mAppBridge;
private SnailItem mScreenNailItem;
private SnailAlbum mScreenNailSet;
private OrientationManager mOrientationManager;
private boolean mHasActivityResult;
private boolean mTreatBackAsUp;
private boolean mStartInFilmstrip;
private boolean mStartedFromAlbumPage;
private RawTexture mFadeOutTexture;
private Rect mOpenAnimationRect;
public static final int ANIM_TIME_OPENING = 300;
// The item that is deleted (but it can still be undeleted before committing)
private Path mDeletePath;
private boolean mDeleteIsFocus; // whether the deleted item was in focus
private NfcAdapter mNfcAdapter;
private final MyMenuVisibilityListener mMenuVisibilityListener =
new MyMenuVisibilityListener();
public static interface Model extends PhotoView.Model {
public void resume();
public void pause();
public boolean isEmpty();
public void setCurrentPhoto(Path path, int indexHint);
}
private class MyMenuVisibilityListener implements OnMenuVisibilityListener {
@Override
public void onMenuVisibilityChanged(boolean isVisible) {
mIsMenuVisible = isVisible;
refreshHidingMessage();
}
}
private static class BackgroundFadeOut extends FloatAnimation {
public BackgroundFadeOut() {
super(1f, 0f, ANIM_TIME_OPENING);
setInterpolator(new AccelerateInterpolator(2f));
}
}
private final FloatAnimation mBackgroundFade = new BackgroundFadeOut();
@Override
protected int getBackgroundColorId() {
return R.color.photo_background;
}
private final GLView mRootPane = new GLView() {
@Override
protected void renderBackground(GLCanvas view) {
if (mFadeOutTexture != null) {
if (mBackgroundFade.calculate(AnimationTime.get())) invalidate();
if (!mBackgroundFade.isActive()) {
mFadeOutTexture = null;
mOpenAnimationRect = null;
BitmapScreenNail.enableDrawPlaceholder();
} else {
float fadeAlpha = mBackgroundFade.get();
if (fadeAlpha < 1f) {
view.clearBuffer(getBackgroundColor());
view.setAlpha(fadeAlpha);
}
mFadeOutTexture.draw(view, 0, 0);
view.setAlpha(1f - fadeAlpha);
return;
}
}
view.clearBuffer(getBackgroundColor());
}
@Override
protected void onLayout(
boolean changed, int left, int top, int right, int bottom) {
mPhotoView.layout(0, 0, right - left, bottom - top);
if (mShowDetails) {
mDetailsHelper.layout(left, mActionBar.getHeight(), right, bottom);
}
}
};
@Override
public void onCreate(Bundle data, Bundle restoreState) {
super.onCreate(data, restoreState);
mActionBar = mActivity.getGalleryActionBar();
mSelectionManager = new SelectionManager(mActivity, false);
mMenuExecutor = new MenuExecutor(mActivity, mSelectionManager);
mPhotoView = new PhotoView(mActivity);
mPhotoView.setListener(this);
mRootPane.addComponent(mPhotoView);
mApplication = (GalleryApp) ((Activity) mActivity).getApplication();
mOrientationManager = mActivity.getOrientationManager();
mOrientationManager.addListener(this);
mActivity.getGLRoot().setOrientationSource(mOrientationManager);
mHandler = new SynchronizedHandler(mActivity.getGLRoot()) {
@Override
public void handleMessage(Message message) {
switch (message.what) {
case MSG_HIDE_BARS: {
hideBars();
break;
}
case MSG_REFRESH_GRID_BUTTON: {
setGridButtonVisibility(mPhotoView.getFilmMode());
break;
}
case MSG_REFRESH_BOTTOM_CONTROLS: {
if (mBottomControls != null) mBottomControls.refresh();
break;
}
case MSG_LOCK_ORIENTATION: {
mOrientationManager.lockOrientation();
break;
}
case MSG_UNLOCK_ORIENTATION: {
mOrientationManager.unlockOrientation();
break;
}
case MSG_ON_FULL_SCREEN_CHANGED: {
mAppBridge.onFullScreenChanged(message.arg1 == 1);
break;
}
case MSG_UPDATE_ACTION_BAR: {
updateBars();
break;
}
case MSG_WANT_BARS: {
wantBars();
break;
}
case MSG_UNFREEZE_GLROOT: {
mActivity.getGLRoot().unfreeze();
break;
}
default: throw new AssertionError(message.what);
}
}
};
mSetPathString = data.getString(KEY_MEDIA_SET_PATH);
mOriginalSetPathString = mSetPathString;
mNfcAdapter = NfcAdapter.getDefaultAdapter(mActivity.getAndroidContext());
String itemPathString = data.getString(KEY_MEDIA_ITEM_PATH);
Path itemPath = itemPathString != null ?
Path.fromString(itemPathString) : null;
mTreatBackAsUp = data.getBoolean(KEY_TREAT_BACK_AS_UP, false);
mStartInFilmstrip =
data.getBoolean(KEY_START_IN_FILMSTRIP, false);
mStartedFromAlbumPage =
data.getInt(KEY_ALBUMPAGE_TRANSITION,
MSG_ALBUMPAGE_NONE) == MSG_ALBUMPAGE_STARTED;
setGridButtonVisibility(!mStartedFromAlbumPage);
if (mSetPathString != null) {
mAppBridge = (AppBridge) data.getParcelable(KEY_APP_BRIDGE);
if (mAppBridge != null) {
mFlags |= FLAG_HIDE_ACTION_BAR | FLAG_HIDE_STATUS_BAR;
mShowBars = false;
mAppBridge.setServer(this);
mOrientationManager.lockOrientation();
// Get the ScreenNail from AppBridge and register it.
int id = SnailSource.newId();
Path screenNailSetPath = SnailSource.getSetPath(id);
Path screenNailItemPath = SnailSource.getItemPath(id);
mScreenNailSet = (SnailAlbum) mActivity.getDataManager()
.getMediaObject(screenNailSetPath);
mScreenNailItem = (SnailItem) mActivity.getDataManager()
.getMediaObject(screenNailItemPath);
mScreenNailItem.setScreenNail(mAppBridge.attachScreenNail());
// Check if the path is a secure album.
if (SecureSource.isSecurePath(mSetPathString)) {
mSecureAlbum = (SecureAlbum) mActivity.getDataManager()
.getMediaSet(mSetPathString);
}
if (data.getBoolean(KEY_SHOW_WHEN_LOCKED, false)) {
// Set the flag to be on top of the lock screen.
mFlags |= FLAG_SHOW_WHEN_LOCKED;
}
// Combine the original MediaSet with the one for ScreenNail
// from AppBridge.
mSetPathString = "/combo/item/{" + screenNailSetPath +
"," + mSetPathString + "}";
// Start from the screen nail.
itemPath = screenNailItemPath;
}
MediaSet originalSet = mActivity.getDataManager()
.getMediaSet(mSetPathString);
mSelectionManager.setSourceMediaSet(originalSet);
mSetPathString = "/filter/delete/{" + mSetPathString + "}";
mMediaSet = (FilterDeleteSet) mActivity.getDataManager()
.getMediaSet(mSetPathString);
mCurrentIndex = data.getInt(KEY_INDEX_HINT, 0);
if (mMediaSet == null) {
Log.w(TAG, "failed to restore " + mSetPathString);
}
if (itemPath == null) {
- if (mMediaSet.getMediaItemCount() > 0) {
+ int mediaItemCount = mMediaSet.getMediaItemCount();
+ if (mediaItemCount > 0) {
+ if (mCurrentIndex >= mediaItemCount) mCurrentIndex = 0;
itemPath = mMediaSet.getMediaItem(mCurrentIndex, 1)
.get(0).getPath();
} else {
+ // Bail out, PhotoPage can't load on an empty album
return;
}
}
PhotoDataAdapter pda = new PhotoDataAdapter(
mActivity, mPhotoView, mMediaSet, itemPath, mCurrentIndex,
mAppBridge == null ? -1 : 0,
mAppBridge == null ? false : mAppBridge.isPanorama(),
mAppBridge == null ? false : mAppBridge.isStaticCamera());
mModel = pda;
mPhotoView.setModel(mModel);
pda.setDataListener(new PhotoDataAdapter.DataListener() {
@Override
public void onPhotoChanged(int index, Path item) {
mCurrentIndex = index;
if (item != null) {
MediaItem photo = mModel.getMediaItem(0);
if (photo != null) updateCurrentPhoto(photo);
}
updateBars();
}
@Override
public void onLoadingFinished() {
if (!mModel.isEmpty()) {
MediaItem photo = mModel.getMediaItem(0);
if (photo != null) updateCurrentPhoto(photo);
} else if (mIsActive) {
// We only want to finish the PhotoPage if there is no
// deletion that the user can undo.
if (mMediaSet.getNumberOfDeletions() == 0) {
mActivity.getStateManager().finishState(
PhotoPage.this);
}
}
}
@Override
public void onLoadingStarted() {
}
});
} else {
// Get default media set by the URI
MediaItem mediaItem = (MediaItem)
mActivity.getDataManager().getMediaObject(itemPath);
mModel = new SinglePhotoDataAdapter(mActivity, mPhotoView, mediaItem);
mPhotoView.setModel(mModel);
updateCurrentPhoto(mediaItem);
}
mPhotoView.setFilmMode(mStartInFilmstrip && mMediaSet != null && mMediaSet.getMediaItemCount() > 1);
if (mSecureAlbum == null) {
RelativeLayout galleryRoot = (RelativeLayout) ((Activity) mActivity)
.findViewById(mAppBridge != null ? R.id.content : R.id.gallery_root);
if (galleryRoot != null) {
mBottomControls = new PhotoPageBottomControls(this, mActivity, galleryRoot);
}
}
}
public boolean canDisplayBottomControls() {
return mShowBars && !mPhotoView.getFilmMode();
}
public boolean canDisplayBottomControl(int control) {
if (mCurrentPhoto == null) return false;
switch(control) {
case R.id.photopage_bottom_control_edit:
return mCurrentPhoto.getMediaType() == MediaObject.MEDIA_TYPE_IMAGE;
case R.id.photopage_bottom_control_panorama:
return (mCurrentPhoto.getSupportedOperations()
& MediaItem.SUPPORT_PANORAMA) != 0;
default:
return false;
}
}
public void onBottomControlClicked(int control) {
switch(control) {
case R.id.photopage_bottom_control_edit:
launchPhotoEditor();
return;
case R.id.photopage_bottom_control_panorama:
LightCycleHelper.viewPanorama(mActivity, mCurrentPhoto.getContentUri());
return;
default:
return;
}
}
@TargetApi(ApiHelper.VERSION_CODES.JELLY_BEAN)
private void setNfcBeamPushUris(Uri[] uris) {
if (mNfcAdapter != null && ApiHelper.HAS_SET_BEAM_PUSH_URIS) {
mNfcAdapter.setBeamPushUris(uris, mActivity);
}
}
private Intent createShareIntent(Path path) {
DataManager manager = mActivity.getDataManager();
int type = manager.getMediaType(path);
int support = manager.getSupportedOperations(path);
boolean isPanorama = (support & MediaObject.SUPPORT_PANORAMA) != 0;
Intent intent = new Intent(Intent.ACTION_SEND);
intent.setType(MenuExecutor.getMimeType(type, isPanorama));
Uri uri = manager.getContentUri(path);
intent.putExtra(Intent.EXTRA_STREAM, uri);
return intent;
}
private void launchPhotoEditor() {
MediaItem current = mModel.getMediaItem(0);
if (current == null) return;
Intent intent = new Intent(ACTION_NEXTGEN_EDIT);
intent.setData(mActivity.getDataManager().getContentUri(current.getPath())).setFlags(
Intent.FLAG_GRANT_READ_URI_PERMISSION);
if (mActivity.getPackageManager()
.queryIntentActivities(intent, PackageManager.MATCH_DEFAULT_ONLY).size() == 0) {
intent.setAction(Intent.ACTION_EDIT);
}
((Activity) mActivity).startActivityForResult(Intent.createChooser(intent, null),
REQUEST_EDIT);
}
private void updateShareURI(Path path) {
DataManager manager = mActivity.getDataManager();
Uri uri = manager.getContentUri(path);
mActionBar.setShareIntent(createShareIntent(path));
setNfcBeamPushUris(new Uri[]{uri});
}
private void updateCurrentPhoto(MediaItem photo) {
if (mCurrentPhoto == photo) return;
mCurrentPhoto = photo;
if (mCurrentPhoto == null) return;
updateMenuOperations();
updateTitle();
if (mBottomControls != null) mBottomControls.refresh();
if (mShowDetails) {
mDetailsHelper.reloadDetails();
}
if ((mSecureAlbum == null)
&& (photo.getSupportedOperations() & MediaItem.SUPPORT_SHARE) != 0) {
updateShareURI(photo.getPath());
}
}
private void updateTitle() {
if (mCurrentPhoto == null) return;
boolean showTitle = mActivity.getAndroidContext().getResources().getBoolean(
R.bool.show_action_bar_title);
if (showTitle && mCurrentPhoto.getName() != null) {
mActionBar.setTitle(mCurrentPhoto.getName());
} else {
mActionBar.setTitle("");
}
}
private void updateMenuOperations() {
Menu menu = mActionBar.getMenu();
// it could be null if onCreateActionBar has not been called yet
if (menu == null) return;
setGridButtonVisibility(mPhotoView.getFilmMode());
MenuItem item = menu.findItem(R.id.action_slideshow);
item.setVisible((mSecureAlbum == null) && canDoSlideShow());
if (mCurrentPhoto == null) return;
int supportedOperations = mCurrentPhoto.getSupportedOperations();
if (mSecureAlbum != null) {
supportedOperations &= MediaObject.SUPPORT_DELETE;
} else if (!GalleryUtils.isEditorAvailable(mActivity, "image/*")) {
supportedOperations &= ~MediaObject.SUPPORT_EDIT;
}
MenuExecutor.updateMenuOperation(menu, supportedOperations);
}
private boolean canDoSlideShow() {
if (mMediaSet == null || mCurrentPhoto == null) {
return false;
}
if (mCurrentPhoto.getMediaType() != MediaObject.MEDIA_TYPE_IMAGE) {
return false;
}
if (MtpSource.isMtpPath(mOriginalSetPathString)) {
return false;
}
return true;
}
//////////////////////////////////////////////////////////////////////////
// Action Bar show/hide management
//////////////////////////////////////////////////////////////////////////
private void showBars() {
if (mShowBars) return;
mShowBars = true;
mOrientationManager.unlockOrientation();
mActionBar.show();
mActivity.getGLRoot().setLightsOutMode(false);
refreshHidingMessage();
if (mBottomControls != null) mBottomControls.refresh();
}
private void hideBars() {
if (!mShowBars) return;
mShowBars = false;
mActionBar.hide();
mActivity.getGLRoot().setLightsOutMode(true);
mHandler.removeMessages(MSG_HIDE_BARS);
if (mBottomControls != null) mBottomControls.refresh();
}
private void refreshHidingMessage() {
mHandler.removeMessages(MSG_HIDE_BARS);
if (!mIsMenuVisible && !mPhotoView.getFilmMode()) {
mHandler.sendEmptyMessageDelayed(MSG_HIDE_BARS, HIDE_BARS_TIMEOUT);
}
}
private boolean canShowBars() {
// No bars if we are showing camera preview.
if (mAppBridge != null && mCurrentIndex == 0
&& !mPhotoView.getFilmMode()) return false;
// No bars if it's not allowed.
if (!mActionBarAllowed) return false;
return true;
}
private void wantBars() {
if (canShowBars()) showBars();
}
private void toggleBars() {
if (mShowBars) {
hideBars();
} else {
if (canShowBars()) showBars();
}
}
private void updateBars() {
if (!canShowBars()) {
hideBars();
}
}
@Override
public void onOrientationCompensationChanged() {
mActivity.getGLRoot().requestLayoutContentPane();
}
@Override
protected void onBackPressed() {
if (mShowDetails) {
hideDetails();
} else if (mAppBridge == null || !switchWithCaptureAnimation(-1)) {
// We are leaving this page. Set the result now.
setResult();
if (mStartInFilmstrip && !mPhotoView.getFilmMode()) {
mPhotoView.setFilmMode(true);
} else if (mTreatBackAsUp) {
onUpPressed();
} else {
super.onBackPressed();
}
}
}
private void onUpPressed() {
if (mStartInFilmstrip && !mPhotoView.getFilmMode()) {
mPhotoView.setFilmMode(true);
return;
}
if (mActivity.getStateManager().getStateCount() > 1) {
setResult();
super.onBackPressed();
return;
}
if (mOriginalSetPathString == null) return;
if (mAppBridge == null) {
// We're in view mode so set up the stacks on our own.
Bundle data = new Bundle(getData());
data.putString(AlbumPage.KEY_MEDIA_PATH, mOriginalSetPathString);
data.putString(AlbumPage.KEY_PARENT_MEDIA_PATH,
mActivity.getDataManager().getTopSetPath(
DataManager.INCLUDE_ALL));
mActivity.getStateManager().switchState(this, AlbumPage.class, data);
} else {
// Start the real gallery activity to view the camera roll.
Uri uri = Uri.parse("content://media/external/file?bucketId="
+ MediaSetUtils.CAMERA_BUCKET_ID);
Intent intent = new Intent(Intent.ACTION_VIEW);
intent.setDataAndType(uri, ContentResolver.CURSOR_DIR_BASE_TYPE + "/image");
((Activity) mActivity).startActivity(intent);
}
}
private void setResult() {
Intent result = new Intent();
result.putExtra(KEY_RETURN_INDEX_HINT, mCurrentIndex);
setStateResult(Activity.RESULT_OK, result);
}
//////////////////////////////////////////////////////////////////////////
// AppBridge.Server interface
//////////////////////////////////////////////////////////////////////////
@Override
public void setCameraRelativeFrame(Rect frame) {
mPhotoView.setCameraRelativeFrame(frame);
}
@Override
public boolean switchWithCaptureAnimation(int offset) {
return mPhotoView.switchWithCaptureAnimation(offset);
}
@Override
public void setSwipingEnabled(boolean enabled) {
mPhotoView.setSwipingEnabled(enabled);
}
@Override
public void notifyScreenNailChanged() {
mScreenNailItem.setScreenNail(mAppBridge.attachScreenNail());
mScreenNailSet.notifyChange();
}
@Override
public void addSecureAlbumItem(boolean isVideo, int id) {
mSecureAlbum.addMediaItem(isVideo, id);
}
@Override
protected boolean onCreateActionBar(Menu menu) {
mActionBar.createActionBarMenu(R.menu.photo, menu);
updateMenuOperations();
updateTitle();
return true;
}
private MenuExecutor.ProgressListener mConfirmDialogListener =
new MenuExecutor.ProgressListener() {
@Override
public void onProgressUpdate(int index) {}
@Override
public void onProgressComplete(int result) {}
@Override
public void onConfirmDialogShown() {
mHandler.removeMessages(MSG_HIDE_BARS);
}
@Override
public void onConfirmDialogDismissed(boolean confirmed) {
refreshHidingMessage();
}
@Override
public void onProgressStart() {}
};
@Override
protected boolean onItemSelected(MenuItem item) {
if (mModel == null) return true;
refreshHidingMessage();
MediaItem current = mModel.getMediaItem(0);
if (current == null) {
// item is not ready, ignore
return true;
}
int currentIndex = mModel.getCurrentIndex();
Path path = current.getPath();
DataManager manager = mActivity.getDataManager();
int action = item.getItemId();
String confirmMsg = null;
switch (action) {
case android.R.id.home: {
onUpPressed();
return true;
}
case R.id.action_grid: {
if (mStartedFromAlbumPage) {
onUpPressed();
} else {
preparePhotoFallbackView();
Bundle data = new Bundle(getData());
data.putString(AlbumPage.KEY_MEDIA_PATH, mOriginalSetPathString);
data.putString(AlbumPage.KEY_PARENT_MEDIA_PATH,
mActivity.getDataManager().getTopSetPath(
DataManager.INCLUDE_ALL));
mActivity.getTransitionStore().put(
KEY_RETURN_INDEX_HINT, mCurrentIndex);
mActivity.getStateManager().startState(AlbumPage.class, data);
}
return true;
}
case R.id.action_slideshow: {
Bundle data = new Bundle();
data.putString(SlideshowPage.KEY_SET_PATH, mMediaSet.getPath().toString());
data.putString(SlideshowPage.KEY_ITEM_PATH, path.toString());
data.putInt(SlideshowPage.KEY_PHOTO_INDEX, currentIndex);
data.putBoolean(SlideshowPage.KEY_REPEAT, true);
mActivity.getStateManager().startStateForResult(
SlideshowPage.class, REQUEST_SLIDESHOW, data);
return true;
}
case R.id.action_crop: {
Activity activity = mActivity;
Intent intent = new Intent(CropImage.CROP_ACTION);
intent.setClass(activity, CropImage.class);
intent.setData(manager.getContentUri(path));
activity.startActivityForResult(intent, PicasaSource.isPicasaImage(current)
? REQUEST_CROP_PICASA
: REQUEST_CROP);
return true;
}
case R.id.action_trim: {
Intent intent = new Intent(mActivity, TrimVideo.class);
intent.setData(manager.getContentUri(path));
// We need the file path to wrap this into a RandomAccessFile.
intent.putExtra(KEY_MEDIA_ITEM_PATH, current.getFilePath());
mActivity.startActivityForResult(intent, REQUEST_TRIM);
return true;
}
case R.id.action_edit: {
launchPhotoEditor();
return true;
}
case R.id.action_details: {
if (mShowDetails) {
hideDetails();
} else {
showDetails();
}
return true;
}
case R.id.action_delete:
confirmMsg = mActivity.getResources().getQuantityString(
R.plurals.delete_selection, 1);
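// no break: deliberately falls through to the shared
// single-selection handling below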
case R.id.action_setas:
case R.id.action_rotate_ccw:
case R.id.action_rotate_cw:
case R.id.action_show_on_map:
mSelectionManager.deSelectAll();
mSelectionManager.toggle(path);
mMenuExecutor.onMenuClicked(item, confirmMsg, mConfirmDialogListener);
return true;
case R.id.action_import:
mSelectionManager.deSelectAll();
mSelectionManager.toggle(path);
mMenuExecutor.onMenuClicked(item, confirmMsg,
new ImportCompleteListener(mActivity));
return true;
case R.id.action_share:
Activity activity = mActivity;
Intent intent = createShareIntent(mCurrentPhoto.getPath());
activity.startActivity(Intent.createChooser(intent,
activity.getString(R.string.share)));
return true;
default :
return false;
}
}
private void hideDetails() {
mShowDetails = false;
mDetailsHelper.hide();
}
private void showDetails() {
mShowDetails = true;
if (mDetailsHelper == null) {
mDetailsHelper = new DetailsHelper(mActivity, mRootPane, new MyDetailsSource());
mDetailsHelper.setCloseListener(new CloseListener() {
@Override
public void onClose() {
hideDetails();
}
});
}
mDetailsHelper.show();
}
////////////////////////////////////////////////////////////////////////////
// Callbacks from PhotoView
////////////////////////////////////////////////////////////////////////////
@Override
public void onSingleTapUp(int x, int y) {
if (mAppBridge != null) {
if (mAppBridge.onSingleTapUp(x, y)) return;
}
MediaItem item = mModel.getMediaItem(0);
if (item == null || item == mScreenNailItem) {
// item is not ready or it is camera preview, ignore
return;
}
boolean playVideo = (mSecureAlbum == null) &&
((item.getSupportedOperations() & MediaItem.SUPPORT_PLAY) != 0);
boolean viewPanorama = (mSecureAlbum == null) &&
(item.getSupportedOperations() & MediaItem.SUPPORT_PANORAMA) != 0;
if (playVideo) {
// determine if the point is at center (1/6) of the photo view.
// (The position of the "play" icon is at center (1/6) of the photo)
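// i.e. |x - w/2| <= w/12 and |y - h/2| <= h/12: a centered box
// one-sixth of the view's width and height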
int w = mPhotoView.getWidth();
int h = mPhotoView.getHeight();
playVideo = (Math.abs(x - w / 2) * 12 <= w)
&& (Math.abs(y - h / 2) * 12 <= h);
}
if (playVideo) {
playVideo(mActivity, item.getPlayUri(), item.getName());
} else if (viewPanorama) {
LightCycleHelper.viewPanorama(mActivity, item.getContentUri());
} else {
toggleBars();
}
}
@Override
public void lockOrientation() {
mHandler.sendEmptyMessage(MSG_LOCK_ORIENTATION);
}
@Override
public void unlockOrientation() {
mHandler.sendEmptyMessage(MSG_UNLOCK_ORIENTATION);
}
@Override
public void onActionBarAllowed(boolean allowed) {
mActionBarAllowed = allowed;
mHandler.sendEmptyMessage(MSG_UPDATE_ACTION_BAR);
}
@Override
public void onActionBarWanted() {
mHandler.sendEmptyMessage(MSG_WANT_BARS);
}
@Override
public void onFullScreenChanged(boolean full) {
Message m = mHandler.obtainMessage(
MSG_ON_FULL_SCREEN_CHANGED, full ? 1 : 0, 0);
m.sendToTarget();
}
// How we do delete/undo:
//
// When the user chooses to delete a media item, we just tell the
// FilterDeleteSet to hide that item. If the user chooses to undo it, we
// again tell FilterDeleteSet not to hide it. If the user chooses to commit
// the deletion, we then actually delete the media item.
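// A typical sequence (path value hypothetical):
//   onDeleteImage(path, 0);  // hide item, remember it in mDeletePath
//   onUndoDeleteImage();     // un-hide it again, or ...
//   onCommitDeleteImage();   // ... perform the real delete via MenuExecutor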
@Override
public void onDeleteImage(Path path, int offset) {
onCommitDeleteImage(); // commit the previous deletion
mDeletePath = path;
mDeleteIsFocus = (offset == 0);
mMediaSet.addDeletion(path, mCurrentIndex + offset);
}
@Override
public void onUndoDeleteImage() {
if (mDeletePath == null) return;
// If the deletion was done on the focused item, we want the model to
// focus on it when it is undeleted.
if (mDeleteIsFocus) mModel.setFocusHintPath(mDeletePath);
mMediaSet.removeDeletion(mDeletePath);
mDeletePath = null;
}
@Override
public void onCommitDeleteImage() {
if (mDeletePath == null) return;
mSelectionManager.deSelectAll();
mSelectionManager.toggle(mDeletePath);
mMenuExecutor.onMenuClicked(R.id.action_delete, null, true, false);
mDeletePath = null;
}
public static void playVideo(Activity activity, Uri uri, String title) {
try {
Intent intent = new Intent(Intent.ACTION_VIEW)
.setDataAndType(uri, "video/*")
.putExtra(Intent.EXTRA_TITLE, title)
.putExtra(MovieActivity.KEY_TREAT_UP_AS_BACK, true);
activity.startActivityForResult(intent, REQUEST_PLAY_VIDEO);
} catch (ActivityNotFoundException e) {
Toast.makeText(activity, activity.getString(R.string.video_err),
Toast.LENGTH_SHORT).show();
}
}
private void setCurrentPhotoByIntent(Intent intent) {
if (intent == null) return;
Path path = mApplication.getDataManager()
.findPathByUri(intent.getData(), intent.getType());
if (path != null) {
mModel.setCurrentPhoto(path, mCurrentIndex);
}
}
@Override
protected void onStateResult(int requestCode, int resultCode, Intent data) {
mHasActivityResult = true;
switch (requestCode) {
case REQUEST_EDIT:
setCurrentPhotoByIntent(data);
break;
case REQUEST_CROP:
if (resultCode == Activity.RESULT_OK) {
setCurrentPhotoByIntent(data);
}
break;
case REQUEST_CROP_PICASA: {
if (resultCode == Activity.RESULT_OK) {
Context context = mActivity.getAndroidContext();
String message = context.getString(R.string.crop_saved,
context.getString(R.string.folder_download));
Toast.makeText(context, message, Toast.LENGTH_SHORT).show();
}
break;
}
case REQUEST_SLIDESHOW: {
if (data == null) break;
String path = data.getStringExtra(SlideshowPage.KEY_ITEM_PATH);
int index = data.getIntExtra(SlideshowPage.KEY_PHOTO_INDEX, 0);
if (path != null) {
mModel.setCurrentPhoto(Path.fromString(path), index);
}
}
}
}
@Override
protected void clearStateResult() {
mHasActivityResult = false;
}
private class PreparePhotoFallback implements OnGLIdleListener {
private PhotoFallbackEffect mPhotoFallback = new PhotoFallbackEffect();
private boolean mResultReady = false;
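// get() blocks its caller until onGLIdle() has built the fallback
// effect on the GL thread; the two coordinate via wait/notifyAll.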
public synchronized PhotoFallbackEffect get() {
while (!mResultReady) {
Utils.waitWithoutInterrupt(this);
}
return mPhotoFallback;
}
@Override
public boolean onGLIdle(GLCanvas canvas, boolean renderRequested) {
mPhotoFallback = mPhotoView.buildFallbackEffect(mRootPane, canvas);
synchronized (this) {
mResultReady = true;
notifyAll();
}
return false;
}
}
private void preparePhotoFallbackView() {
GLRoot root = mActivity.getGLRoot();
PreparePhotoFallback task = new PreparePhotoFallback();
root.unlockRenderThread();
PhotoFallbackEffect anim;
try {
root.addOnGLIdleListener(task);
anim = task.get();
} finally {
root.lockRenderThread();
}
mActivity.getTransitionStore().put(
AlbumPage.KEY_RESUME_ANIMATION, anim);
}
@Override
public void onPause() {
super.onPause();
mIsActive = false;
mActivity.getGLRoot().unfreeze();
mHandler.removeMessages(MSG_UNFREEZE_GLROOT);
DetailsHelper.pause();
if (mModel != null) {
if (isFinishing()) preparePhotoFallbackView();
mModel.pause();
}
mPhotoView.pause();
mHandler.removeMessages(MSG_HIDE_BARS);
mActionBar.removeOnMenuVisibilityListener(mMenuVisibilityListener);
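// Leaving the page makes any still-undoable deletion permanent.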
onCommitDeleteImage();
mMenuExecutor.pause();
if (mMediaSet != null) mMediaSet.clearDeletion();
}
@Override
public void onCurrentImageUpdated() {
mActivity.getGLRoot().unfreeze();
}
private void setGridButtonVisibility(boolean enabled) {
Menu menu = mActionBar.getMenu();
if (menu == null) return;
MenuItem item = menu.findItem(R.id.action_grid);
if (item != null) item.setVisible((mSecureAlbum == null) && enabled);
}
public void onFilmModeChanged(boolean enabled) {
mHandler.sendEmptyMessage(MSG_REFRESH_GRID_BUTTON);
mHandler.sendEmptyMessage(MSG_REFRESH_BOTTOM_CONTROLS);
if (enabled) {
mHandler.removeMessages(MSG_HIDE_BARS);
} else {
refreshHidingMessage();
}
}
private void transitionFromAlbumPageIfNeeded() {
TransitionStore transitions = mActivity.getTransitionStore();
int resumeIndex = transitions.get(KEY_INDEX_HINT, -1);
if (resumeIndex >= 0) {
mCurrentIndex = resumeIndex;
mModel.setCurrentPhoto((Path)transitions.get(KEY_MEDIA_SET_PATH), mCurrentIndex);
mPhotoView.switchToImage(mCurrentIndex);
}
int albumPageTransition = transitions.get(
KEY_ALBUMPAGE_TRANSITION, MSG_ALBUMPAGE_NONE);
if(albumPageTransition != MSG_ALBUMPAGE_NONE) {
mPhotoView.setFilmMode(mStartInFilmstrip
&& albumPageTransition == MSG_ALBUMPAGE_RESUMED);
}
mFadeOutTexture = transitions.get(PreparePageFadeoutTexture.KEY_FADE_TEXTURE);
if (mFadeOutTexture != null) {
mBackgroundFade.start();
BitmapScreenNail.disableDrawPlaceholder();
mOpenAnimationRect =
albumPageTransition == MSG_ALBUMPAGE_NONE ?
(Rect) mData.getParcelable(KEY_OPEN_ANIMATION_RECT) :
(Rect) transitions.get(KEY_OPEN_ANIMATION_RECT);
mPhotoView.setOpenAnimationRect(mOpenAnimationRect);
mBackgroundFade.start();
}
}
@Override
protected void onResume() {
super.onResume();
if (mModel == null) {
mActivity.getStateManager().finishState(this);
return;
}
transitionFromAlbumPageIfNeeded();
mActivity.getGLRoot().freeze();
mIsActive = true;
setContentPane(mRootPane);
mModel.resume();
mPhotoView.resume();
mActionBar.setDisplayOptions(
((mSecureAlbum == null) && (mSetPathString != null)), true);
mActionBar.addOnMenuVisibilityListener(mMenuVisibilityListener);
if (mAppBridge != null && !mHasActivityResult) {
mPhotoView.resetToFirstPicture();
}
mHasActivityResult = false;
mHandler.sendEmptyMessageDelayed(MSG_UNFREEZE_GLROOT, UNFREEZE_GLROOT_TIMEOUT);
}
@Override
protected void onDestroy() {
if (mAppBridge != null) {
mAppBridge.setServer(null);
mScreenNailItem.setScreenNail(null);
mAppBridge.detachScreenNail();
mAppBridge = null;
mScreenNailSet = null;
mScreenNailItem = null;
}
mOrientationManager.removeListener(this);
mActivity.getGLRoot().setOrientationSource(null);
if (mBottomControls != null) mBottomControls.cleanup();
// Remove all pending messages.
mHandler.removeCallbacksAndMessages(null);
super.onDestroy();
}
private class MyDetailsSource implements DetailsSource {
@Override
public MediaDetails getDetails() {
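// getMediaItem(0) returns the photo the model is currently centered on.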
return mModel.getMediaItem(0).getDetails();
}
@Override
public int size() {
return mMediaSet != null ? mMediaSet.getMediaItemCount() : 1;
}
@Override
public int setIndex() {
return mModel.getCurrentIndex();
}
}
}
| false | true | public void onCreate(Bundle data, Bundle restoreState) {
super.onCreate(data, restoreState);
mActionBar = mActivity.getGalleryActionBar();
mSelectionManager = new SelectionManager(mActivity, false);
mMenuExecutor = new MenuExecutor(mActivity, mSelectionManager);
mPhotoView = new PhotoView(mActivity);
mPhotoView.setListener(this);
mRootPane.addComponent(mPhotoView);
mApplication = (GalleryApp) ((Activity) mActivity).getApplication();
mOrientationManager = mActivity.getOrientationManager();
mOrientationManager.addListener(this);
mActivity.getGLRoot().setOrientationSource(mOrientationManager);
mHandler = new SynchronizedHandler(mActivity.getGLRoot()) {
@Override
public void handleMessage(Message message) {
switch (message.what) {
case MSG_HIDE_BARS: {
hideBars();
break;
}
case MSG_REFRESH_GRID_BUTTON: {
setGridButtonVisibility(mPhotoView.getFilmMode());
break;
}
case MSG_REFRESH_BOTTOM_CONTROLS: {
if (mBottomControls != null) mBottomControls.refresh();
break;
}
case MSG_LOCK_ORIENTATION: {
mOrientationManager.lockOrientation();
break;
}
case MSG_UNLOCK_ORIENTATION: {
mOrientationManager.unlockOrientation();
break;
}
case MSG_ON_FULL_SCREEN_CHANGED: {
mAppBridge.onFullScreenChanged(message.arg1 == 1);
break;
}
case MSG_UPDATE_ACTION_BAR: {
updateBars();
break;
}
case MSG_WANT_BARS: {
wantBars();
break;
}
case MSG_UNFREEZE_GLROOT: {
mActivity.getGLRoot().unfreeze();
break;
}
default: throw new AssertionError(message.what);
}
}
};
mSetPathString = data.getString(KEY_MEDIA_SET_PATH);
mOriginalSetPathString = mSetPathString;
mNfcAdapter = NfcAdapter.getDefaultAdapter(mActivity.getAndroidContext());
String itemPathString = data.getString(KEY_MEDIA_ITEM_PATH);
Path itemPath = itemPathString != null ?
Path.fromString(data.getString(KEY_MEDIA_ITEM_PATH)) :
null;
mTreatBackAsUp = data.getBoolean(KEY_TREAT_BACK_AS_UP, false);
mStartInFilmstrip =
data.getBoolean(KEY_START_IN_FILMSTRIP, false);
mStartedFromAlbumPage =
data.getInt(KEY_ALBUMPAGE_TRANSITION,
MSG_ALBUMPAGE_NONE) == MSG_ALBUMPAGE_STARTED;
setGridButtonVisibility(!mStartedFromAlbumPage);
if (mSetPathString != null) {
mAppBridge = (AppBridge) data.getParcelable(KEY_APP_BRIDGE);
if (mAppBridge != null) {
mFlags |= FLAG_HIDE_ACTION_BAR | FLAG_HIDE_STATUS_BAR;
mShowBars = false;
mAppBridge.setServer(this);
mOrientationManager.lockOrientation();
// Get the ScreenNail from AppBridge and register it.
int id = SnailSource.newId();
Path screenNailSetPath = SnailSource.getSetPath(id);
Path screenNailItemPath = SnailSource.getItemPath(id);
mScreenNailSet = (SnailAlbum) mActivity.getDataManager()
.getMediaObject(screenNailSetPath);
mScreenNailItem = (SnailItem) mActivity.getDataManager()
.getMediaObject(screenNailItemPath);
mScreenNailItem.setScreenNail(mAppBridge.attachScreenNail());
// Check if the path is a secure album.
if (SecureSource.isSecurePath(mSetPathString)) {
mSecureAlbum = (SecureAlbum) mActivity.getDataManager()
.getMediaSet(mSetPathString);
}
if (data.getBoolean(KEY_SHOW_WHEN_LOCKED, false)) {
// Set the flag to be on top of the lock screen.
mFlags |= FLAG_SHOW_WHEN_LOCKED;
}
// Combine the original MediaSet with the one for ScreenNail
// from AppBridge.
mSetPathString = "/combo/item/{" + screenNailSetPath +
"," + mSetPathString + "}";
// Start from the screen nail.
itemPath = screenNailItemPath;
}
MediaSet originalSet = mActivity.getDataManager()
.getMediaSet(mSetPathString);
mSelectionManager.setSourceMediaSet(originalSet);
mSetPathString = "/filter/delete/{" + mSetPathString + "}";
mMediaSet = (FilterDeleteSet) mActivity.getDataManager()
.getMediaSet(mSetPathString);
mCurrentIndex = data.getInt(KEY_INDEX_HINT, 0);
if (mMediaSet == null) {
Log.w(TAG, "failed to restore " + mSetPathString);
}
if (itemPath == null) {
if (mMediaSet.getMediaItemCount() > 0) {
itemPath = mMediaSet.getMediaItem(mCurrentIndex, 1)
.get(0).getPath();
} else {
return;
}
}
PhotoDataAdapter pda = new PhotoDataAdapter(
mActivity, mPhotoView, mMediaSet, itemPath, mCurrentIndex,
mAppBridge == null ? -1 : 0,
mAppBridge == null ? false : mAppBridge.isPanorama(),
mAppBridge == null ? false : mAppBridge.isStaticCamera());
mModel = pda;
mPhotoView.setModel(mModel);
pda.setDataListener(new PhotoDataAdapter.DataListener() {
@Override
public void onPhotoChanged(int index, Path item) {
mCurrentIndex = index;
if (item != null) {
MediaItem photo = mModel.getMediaItem(0);
if (photo != null) updateCurrentPhoto(photo);
}
updateBars();
}
@Override
public void onLoadingFinished() {
if (!mModel.isEmpty()) {
MediaItem photo = mModel.getMediaItem(0);
if (photo != null) updateCurrentPhoto(photo);
} else if (mIsActive) {
// We only want to finish the PhotoPage if there is no
// deletion that the user can undo.
if (mMediaSet.getNumberOfDeletions() == 0) {
mActivity.getStateManager().finishState(
PhotoPage.this);
}
}
}
@Override
public void onLoadingStarted() {
}
});
} else {
// Get default media set by the URI
MediaItem mediaItem = (MediaItem)
mActivity.getDataManager().getMediaObject(itemPath);
mModel = new SinglePhotoDataAdapter(mActivity, mPhotoView, mediaItem);
mPhotoView.setModel(mModel);
updateCurrentPhoto(mediaItem);
}
mPhotoView.setFilmMode(mStartInFilmstrip && mMediaSet.getMediaItemCount() > 1);
if (mSecureAlbum == null) {
RelativeLayout galleryRoot = (RelativeLayout) ((Activity) mActivity)
.findViewById(mAppBridge != null ? R.id.content : R.id.gallery_root);
if (galleryRoot != null) {
mBottomControls = new PhotoPageBottomControls(this, mActivity, galleryRoot);
}
}
}
| public void onCreate(Bundle data, Bundle restoreState) {
super.onCreate(data, restoreState);
mActionBar = mActivity.getGalleryActionBar();
mSelectionManager = new SelectionManager(mActivity, false);
mMenuExecutor = new MenuExecutor(mActivity, mSelectionManager);
mPhotoView = new PhotoView(mActivity);
mPhotoView.setListener(this);
mRootPane.addComponent(mPhotoView);
mApplication = (GalleryApp) ((Activity) mActivity).getApplication();
mOrientationManager = mActivity.getOrientationManager();
mOrientationManager.addListener(this);
mActivity.getGLRoot().setOrientationSource(mOrientationManager);
mHandler = new SynchronizedHandler(mActivity.getGLRoot()) {
@Override
public void handleMessage(Message message) {
switch (message.what) {
case MSG_HIDE_BARS: {
hideBars();
break;
}
case MSG_REFRESH_GRID_BUTTON: {
setGridButtonVisibility(mPhotoView.getFilmMode());
break;
}
case MSG_REFRESH_BOTTOM_CONTROLS: {
if (mBottomControls != null) mBottomControls.refresh();
break;
}
case MSG_LOCK_ORIENTATION: {
mOrientationManager.lockOrientation();
break;
}
case MSG_UNLOCK_ORIENTATION: {
mOrientationManager.unlockOrientation();
break;
}
case MSG_ON_FULL_SCREEN_CHANGED: {
mAppBridge.onFullScreenChanged(message.arg1 == 1);
break;
}
case MSG_UPDATE_ACTION_BAR: {
updateBars();
break;
}
case MSG_WANT_BARS: {
wantBars();
break;
}
case MSG_UNFREEZE_GLROOT: {
mActivity.getGLRoot().unfreeze();
break;
}
default: throw new AssertionError(message.what);
}
}
};
mSetPathString = data.getString(KEY_MEDIA_SET_PATH);
mOriginalSetPathString = mSetPathString;
mNfcAdapter = NfcAdapter.getDefaultAdapter(mActivity.getAndroidContext());
String itemPathString = data.getString(KEY_MEDIA_ITEM_PATH);
Path itemPath = itemPathString != null ?
Path.fromString(data.getString(KEY_MEDIA_ITEM_PATH)) :
null;
mTreatBackAsUp = data.getBoolean(KEY_TREAT_BACK_AS_UP, false);
mStartInFilmstrip =
data.getBoolean(KEY_START_IN_FILMSTRIP, false);
mStartedFromAlbumPage =
data.getInt(KEY_ALBUMPAGE_TRANSITION,
MSG_ALBUMPAGE_NONE) == MSG_ALBUMPAGE_STARTED;
setGridButtonVisibility(!mStartedFromAlbumPage);
if (mSetPathString != null) {
mAppBridge = (AppBridge) data.getParcelable(KEY_APP_BRIDGE);
if (mAppBridge != null) {
mFlags |= FLAG_HIDE_ACTION_BAR | FLAG_HIDE_STATUS_BAR;
mShowBars = false;
mAppBridge.setServer(this);
mOrientationManager.lockOrientation();
// Get the ScreenNail from AppBridge and register it.
int id = SnailSource.newId();
Path screenNailSetPath = SnailSource.getSetPath(id);
Path screenNailItemPath = SnailSource.getItemPath(id);
mScreenNailSet = (SnailAlbum) mActivity.getDataManager()
.getMediaObject(screenNailSetPath);
mScreenNailItem = (SnailItem) mActivity.getDataManager()
.getMediaObject(screenNailItemPath);
mScreenNailItem.setScreenNail(mAppBridge.attachScreenNail());
// Check if the path is a secure album.
if (SecureSource.isSecurePath(mSetPathString)) {
mSecureAlbum = (SecureAlbum) mActivity.getDataManager()
.getMediaSet(mSetPathString);
}
if (data.getBoolean(KEY_SHOW_WHEN_LOCKED, false)) {
// Set the flag to be on top of the lock screen.
mFlags |= FLAG_SHOW_WHEN_LOCKED;
}
// Combine the original MediaSet with the one for ScreenNail
// from AppBridge.
mSetPathString = "/combo/item/{" + screenNailSetPath +
"," + mSetPathString + "}";
// Start from the screen nail.
itemPath = screenNailItemPath;
}
MediaSet originalSet = mActivity.getDataManager()
.getMediaSet(mSetPathString);
mSelectionManager.setSourceMediaSet(originalSet);
mSetPathString = "/filter/delete/{" + mSetPathString + "}";
mMediaSet = (FilterDeleteSet) mActivity.getDataManager()
.getMediaSet(mSetPathString);
mCurrentIndex = data.getInt(KEY_INDEX_HINT, 0);
if (mMediaSet == null) {
Log.w(TAG, "failed to restore " + mSetPathString);
}
if (itemPath == null) {
int mediaItemCount = mMediaSet.getMediaItemCount();
if (mediaItemCount > 0) {
if (mCurrentIndex >= mediaItemCount) mCurrentIndex = 0;
itemPath = mMediaSet.getMediaItem(mCurrentIndex, 1)
.get(0).getPath();
} else {
// Bail out, PhotoPage can't load on an empty album
return;
}
}
PhotoDataAdapter pda = new PhotoDataAdapter(
mActivity, mPhotoView, mMediaSet, itemPath, mCurrentIndex,
mAppBridge == null ? -1 : 0,
mAppBridge == null ? false : mAppBridge.isPanorama(),
mAppBridge == null ? false : mAppBridge.isStaticCamera());
mModel = pda;
mPhotoView.setModel(mModel);
pda.setDataListener(new PhotoDataAdapter.DataListener() {
@Override
public void onPhotoChanged(int index, Path item) {
mCurrentIndex = index;
if (item != null) {
MediaItem photo = mModel.getMediaItem(0);
if (photo != null) updateCurrentPhoto(photo);
}
updateBars();
}
@Override
public void onLoadingFinished() {
if (!mModel.isEmpty()) {
MediaItem photo = mModel.getMediaItem(0);
if (photo != null) updateCurrentPhoto(photo);
} else if (mIsActive) {
// We only want to finish the PhotoPage if there is no
// deletion that the user can undo.
if (mMediaSet.getNumberOfDeletions() == 0) {
mActivity.getStateManager().finishState(
PhotoPage.this);
}
}
}
@Override
public void onLoadingStarted() {
}
});
} else {
// Get default media set by the URI
MediaItem mediaItem = (MediaItem)
mActivity.getDataManager().getMediaObject(itemPath);
mModel = new SinglePhotoDataAdapter(mActivity, mPhotoView, mediaItem);
mPhotoView.setModel(mModel);
updateCurrentPhoto(mediaItem);
}
mPhotoView.setFilmMode(mStartInFilmstrip && mMediaSet.getMediaItemCount() > 1);
if (mSecureAlbum == null) {
RelativeLayout galleryRoot = (RelativeLayout) ((Activity) mActivity)
.findViewById(mAppBridge != null ? R.id.content : R.id.gallery_root);
if (galleryRoot != null) {
mBottomControls = new PhotoPageBottomControls(this, mActivity, galleryRoot);
}
}
}
|
diff --git a/runtime/ceylon/language/$array.java b/runtime/ceylon/language/$array.java
index 803a1fd0..f17c9e69 100644
--- a/runtime/ceylon/language/$array.java
+++ b/runtime/ceylon/language/$array.java
@@ -1,37 +1,37 @@
package ceylon.language;
import java.util.ArrayList;
import java.util.List;
import com.redhat.ceylon.compiler.java.metadata.Ceylon;
import com.redhat.ceylon.compiler.java.metadata.Method;
import com.redhat.ceylon.compiler.java.metadata.Name;
import com.redhat.ceylon.compiler.java.metadata.TypeInfo;
import com.redhat.ceylon.compiler.java.metadata.TypeParameter;
import com.redhat.ceylon.compiler.java.metadata.TypeParameters;
@Ceylon
@Method
public final class $array {
private $array() {}
@TypeParameters(@TypeParameter(value="Element"))
@TypeInfo("ceylon.language.Array<Element>")
public static <Element> Array<? extends Element> $array(
@Name("elements")
@TypeInfo("ceylon.language.Iterable<ceylon.language.Nothing|Element>")
final ceylon.language.Iterable<? extends Element> elements) {
List<Element> list = new ArrayList<Element>();
java.lang.Object $tmp;
for (Iterator<? extends Element> iter=elements.getIterator(); !(($tmp = iter.next()) instanceof Finished);) {
Element elem = (Element)$tmp;
if (elem!=null) list.add(elem);
}
if (list.size() > 0) {
return new ceylon.language.NonemptyArray<Element>(list);
} else {
- return arrayOfNone.arrayOfNone();
+ return arrayOfNone.<Element>arrayOfNone();
}
}
}
| true | true | public static <Element> Array<? extends Element> $array(
@Name("elements")
@TypeInfo("ceylon.language.Iterable<ceylon.language.Nothing|Element>")
final ceylon.language.Iterable<? extends Element> elements) {
List<Element> list = new ArrayList<Element>();
java.lang.Object $tmp;
for (Iterator<? extends Element> iter=elements.getIterator(); !(($tmp = iter.next()) instanceof Finished);) {
Element elem = (Element)$tmp;
if (elem!=null) list.add(elem);
}
if (list.size() > 0) {
return new ceylon.language.NonemptyArray<Element>(list);
} else {
return arrayOfNone.arrayOfNone();
}
}
| public static <Element> Array<? extends Element> $array(
@Name("elements")
@TypeInfo("ceylon.language.Iterable<ceylon.language.Nothing|Element>")
final ceylon.language.Iterable<? extends Element> elements) {
List<Element> list = new ArrayList<Element>();
java.lang.Object $tmp;
for (Iterator<? extends Element> iter=elements.getIterator(); !(($tmp = iter.next()) instanceof Finished);) {
Element elem = (Element)$tmp;
if (elem!=null) list.add(elem);
}
if (list.size() > 0) {
return new ceylon.language.NonemptyArray<Element>(list);
} else {
return arrayOfNone.<Element>arrayOfNone();
}
}
|
diff --git a/src/uk/me/parabola/imgfmt/sys/Directory.java b/src/uk/me/parabola/imgfmt/sys/Directory.java
index 1342cd60..61f0b4e8 100644
--- a/src/uk/me/parabola/imgfmt/sys/Directory.java
+++ b/src/uk/me/parabola/imgfmt/sys/Directory.java
@@ -1,220 +1,220 @@
/*
* Copyright (C) 2006 Steve Ratcliffe
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License version 2 as
* published by the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
*
* Author: Steve Ratcliffe
* Create date: 26-Nov-2006
*/
package uk.me.parabola.imgfmt.sys;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import uk.me.parabola.imgfmt.FileExistsException;
import uk.me.parabola.imgfmt.Utils;
import uk.me.parabola.imgfmt.fs.DirectoryEntry;
import uk.me.parabola.imgfmt.fs.ImgChannel;
import uk.me.parabola.log.Logger;
/**
* The directory. There is only one directory and it contains the
* file names and block information. On disk each entry is a
* multiple of the block size.
*
* @author Steve Ratcliffe
*/
class Directory {
private static final Logger log = Logger.getLogger(Directory.class);
//private final FileChannel file;
private ImgChannel chan;
private final BlockManager headerBlockManager;
private long startPos;
// The list of files themselves.
private final Map<String, DirectoryEntry> entries = new LinkedHashMap<String, DirectoryEntry>();
Directory(BlockManager headerBlockManager) {
this.headerBlockManager = headerBlockManager;
}
/**
* Create a new file in the directory.
*
* @param name The file name. Must be 8+3 characters.
* @param blockManager To allocate blocks for the created file entry.
* @return The new directory entity.
* @throws FileExistsException If the entry already
* exists.
*/
Dirent create(String name, BlockManager blockManager) throws FileExistsException {
// Check to see if it is already there.
if (entries.get(name) != null)
throw new FileExistsException("File " + name + " already exists");
Dirent ent;
if (name.equals(ImgFS.DIRECTORY_FILE_NAME)) {
ent = new HeaderDirent(name, blockManager);
} else {
ent = new Dirent(name, blockManager);
}
addEntry(ent);
return ent;
}
/**
* Initialise the directory for reading the file. The whole directory
* is read in.
*
* @throws IOException If it cannot be read.
*/
void readInit(byte xorByte) throws IOException {
assert chan != null;
ByteBuffer buf = ByteBuffer.allocate(512);
buf.order(ByteOrder.LITTLE_ENDIAN);
chan.position(startPos);
Dirent current = null;
while ((chan.read(buf)) > 0) {
buf.flip();
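// The file may be XOR-obfuscated with a single byte taken from the header;
// decode the block in place before parsing.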
if(xorByte != 0) {
byte[] bufBytes = buf.array();
for(int i = 0; i < bufBytes.length; ++i)
bufBytes[i] ^= xorByte;
}
int used = buf.get(Dirent.OFF_FILE_USED);
if (used != 1)
continue;
String name = Utils.bytesToString(buf, Dirent.OFF_NAME, Dirent.MAX_FILE_LEN);
String ext = Utils.bytesToString(buf, Dirent.OFF_EXT, Dirent.MAX_EXT_LEN);
log.debug("readinit name", name, ext);
int flag = buf.get(Dirent.OFF_FLAG);
int part = buf.get(Dirent.OFF_FILE_PART) & 0xff;
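// flag == 3 marks the pseudo-entry describing the directory itself;
// a non-zero part continues the previous entry's block list.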
if (flag == 3 && current == null) {
current = (Dirent) entries.get(ImgFS.DIRECTORY_FILE_NAME);
current.initBlocks(buf);
} else if (part == 0) {
current = create(name + '.' + ext, headerBlockManager);
current.initBlocks(buf);
} else {
assert current != null;
current.initBlocks(buf);
}
buf.clear();
}
}
/**
* Write out the directory to the file. The file should be correctly
* positioned by the caller.
*
* @throws IOException If there is a problem writing out any
* of the directory entries.
*/
public void sync() throws IOException {
// The first entry can't really be written until the rest of the directory is
// so we have to step through once to calculate the size and then again
// to write it out.
int blocks = 0;
for (DirectoryEntry dir : entries.values()) {
Dirent ent = (Dirent) dir;
log.debug("ent size", ent.getSize());
int n = ent.numberHeaderBlocks();
blocks += n;
}
// Save the current position
long dirPosition = chan.position();
int blockSize = headerBlockManager.getBlockSize();
- int forHeader = (blocks + DirectoryEntry.SLOTS_PER_ENTRY - 1)/DirectoryEntry.SLOTS_PER_ENTRY;
+ int forHeader = (blocks + Dirent.ENTRY_SIZE - 1)/Dirent.ENTRY_SIZE;
log.debug("header blocks needed", forHeader);
// There is nothing really wrong with larger values (perhaps, I don't
// know for sure!) but the code is written to make it 1, so make sure that it is.
- //assert forHeader == 1;
+ assert forHeader == 1;
// Write the blocks that will contain the header blocks.
chan.position(dirPosition + (long) forHeader * Dirent.ENTRY_SIZE);
for (DirectoryEntry dir : entries.values()) {
Dirent ent = (Dirent) dir;
if (!ent.isSpecial()) {
log.debug("wrting ", dir.getFullName(), " at ", chan.position());
log.debug("ent size", ent.getSize());
ent.sync(chan);
}
}
long end = (long) blockSize * headerBlockManager.getMaxBlock();
ByteBuffer buf = ByteBuffer.allocate((int) (end - chan.position()));
for (int i = 0; i < buf.capacity(); i++)
buf.put((byte) 0);
buf.flip();
chan.write(buf);
// Now go back and write in the directory entry for the header.
chan.position(dirPosition);
Dirent ent = (Dirent) entries.values().iterator().next();
log.debug("ent header size", ent.getSize());
ent.sync(chan);
}
/**
* Get the entries. Used for listing the directory.
*
* @return A list of the directory entries. They will be in the same
* order as in the file.
*/
public List<DirectoryEntry> getEntries() {
return new ArrayList<DirectoryEntry>(entries.values());
}
/**
* Add an entry to the directory.
*
* @param ent The entry to add.
*/
private void addEntry(DirectoryEntry ent) {
entries.put(ent.getFullName(), ent);
}
public void setFile(ImgChannel chan) {
this.chan = chan;
}
public void setStartPos(long startPos) {
this.startPos = startPos;
}
public DirectoryEntry lookup(String name) {
return entries.get(name);
}
}
| false | true | public void sync() throws IOException {
// The first entry can't really be written until the rest of the directory is
// so we have to step through once to calculate the size and then again
// to write it out.
int blocks = 0;
for (DirectoryEntry dir : entries.values()) {
Dirent ent = (Dirent) dir;
log.debug("ent size", ent.getSize());
int n = ent.numberHeaderBlocks();
blocks += n;
}
// Save the current position
long dirPosition = chan.position();
int blockSize = headerBlockManager.getBlockSize();
int forHeader = (blocks + DirectoryEntry.SLOTS_PER_ENTRY - 1)/DirectoryEntry.SLOTS_PER_ENTRY;
log.debug("header blocks needed", forHeader);
// There is nothing really wrong with larger values (perhaps, I don't
// know for sure!) but the code is written to make it 1, so make sure that it is.
//assert forHeader == 1;
// Write the blocks that will contain the header blocks.
chan.position(dirPosition + (long) forHeader * Dirent.ENTRY_SIZE);
for (DirectoryEntry dir : entries.values()) {
Dirent ent = (Dirent) dir;
if (!ent.isSpecial()) {
log.debug("wrting ", dir.getFullName(), " at ", chan.position());
log.debug("ent size", ent.getSize());
ent.sync(chan);
}
}
long end = (long) blockSize * headerBlockManager.getMaxBlock();
ByteBuffer buf = ByteBuffer.allocate((int) (end - chan.position()));
for (int i = 0; i < buf.capacity(); i++)
buf.put((byte) 0);
buf.flip();
chan.write(buf);
// Now go back and write in the directory entry for the header.
chan.position(dirPosition);
Dirent ent = (Dirent) entries.values().iterator().next();
log.debug("ent header size", ent.getSize());
ent.sync(chan);
}
| public void sync() throws IOException {
// The first entry can't really be written until the rest of the directory is
// so we have to step through once to calculate the size and then again
// to write it out.
int blocks = 0;
for (DirectoryEntry dir : entries.values()) {
Dirent ent = (Dirent) dir;
log.debug("ent size", ent.getSize());
int n = ent.numberHeaderBlocks();
blocks += n;
}
// Save the current position
long dirPosition = chan.position();
int blockSize = headerBlockManager.getBlockSize();
int forHeader = (blocks + Dirent.ENTRY_SIZE - 1)/Dirent.ENTRY_SIZE;
log.debug("header blocks needed", forHeader);
// There is nothing really wrong with larger values (perhaps, I don't
// know for sure!) but the code is written to make it 1, so make sure that it is.
assert forHeader == 1;
// Write the blocks that will contain the header blocks.
chan.position(dirPosition + (long) forHeader * Dirent.ENTRY_SIZE);
for (DirectoryEntry dir : entries.values()) {
Dirent ent = (Dirent) dir;
if (!ent.isSpecial()) {
log.debug("wrting ", dir.getFullName(), " at ", chan.position());
log.debug("ent size", ent.getSize());
ent.sync(chan);
}
}
long end = (long) blockSize * headerBlockManager.getMaxBlock();
ByteBuffer buf = ByteBuffer.allocate((int) (end - chan.position()));
for (int i = 0; i < buf.capacity(); i++)
buf.put((byte) 0);
buf.flip();
chan.write(buf);
// Now go back and write in the directory entry for the header.
chan.position(dirPosition);
Dirent ent = (Dirent) entries.values().iterator().next();
log.debug("ent header size", ent.getSize());
ent.sync(chan);
}
|
diff --git a/bundles/servlets/post/src/main/java/org/apache/sling/servlets/post/impl/operations/MoveOperation.java b/bundles/servlets/post/src/main/java/org/apache/sling/servlets/post/impl/operations/MoveOperation.java
index a657da74ee..c390c49311 100644
--- a/bundles/servlets/post/src/main/java/org/apache/sling/servlets/post/impl/operations/MoveOperation.java
+++ b/bundles/servlets/post/src/main/java/org/apache/sling/servlets/post/impl/operations/MoveOperation.java
@@ -1,59 +1,62 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.sling.servlets.post.impl.operations;
import java.util.List;
import javax.jcr.Item;
import javax.jcr.RepositoryException;
import javax.jcr.Session;
import org.apache.sling.servlets.post.Modification;
/**
* The <code>MoveOperation</code> class implements the
* {@link org.apache.sling.servlets.post.SlingPostConstants#OPERATION_MOVE move}
* operation for the Sling default POST servlet.
*/
public class MoveOperation extends AbstractCopyMoveOperation {
@Override
protected String getOperationName() {
return "move";
}
@Override
protected void execute(List<Modification> changes, Item source,
String destParent, String destName) throws RepositoryException {
if (destName == null) {
destName = source.getName();
}
String sourcePath = source.getPath();
+ if (destParent.equals("/")) {
+ destParent = "";
+ }
String destPath = destParent + "/" + destName;
Session session = source.getSession();
if (session.itemExists(destPath)) {
session.getItem(destPath).remove();
}
session.move(sourcePath, destPath);
changes.add(Modification.onMoved(sourcePath, destPath));
}
}
| true | true | protected void execute(List<Modification> changes, Item source,
String destParent, String destName) throws RepositoryException {
if (destName == null) {
destName = source.getName();
}
String sourcePath = source.getPath();
String destPath = destParent + "/" + destName;
Session session = source.getSession();
if (session.itemExists(destPath)) {
session.getItem(destPath).remove();
}
session.move(sourcePath, destPath);
changes.add(Modification.onMoved(sourcePath, destPath));
}
| protected void execute(List<Modification> changes, Item source,
String destParent, String destName) throws RepositoryException {
if (destName == null) {
destName = source.getName();
}
String sourcePath = source.getPath();
if (destParent.equals("/")) {
destParent = "";
}
String destPath = destParent + "/" + destName;
Session session = source.getSession();
if (session.itemExists(destPath)) {
session.getItem(destPath).remove();
}
session.move(sourcePath, destPath);
changes.add(Modification.onMoved(sourcePath, destPath));
}
|
diff --git a/src/com/adencraft2000/bouncysponge/BouncySpongePlayerListener.java b/src/com/adencraft2000/bouncysponge/BouncySpongePlayerListener.java
index 8c230df..9d6aa77 100644
--- a/src/com/adencraft2000/bouncysponge/BouncySpongePlayerListener.java
+++ b/src/com/adencraft2000/bouncysponge/BouncySpongePlayerListener.java
@@ -1,53 +1,53 @@
package com.adencraft2000.bouncysponge;
import org.bukkit.Material;
import org.bukkit.block.Block;
import org.bukkit.block.BlockFace;
import org.bukkit.entity.Player;
import org.bukkit.event.*;
import org.bukkit.event.player.*;
import org.bukkit.plugin.java.JavaPlugin;
import org.bukkit.util.Vector;
import org.bukkit.event.entity.EntityDamageEvent;
import org.bukkit.event.entity.EntityDamageEvent.DamageCause;
import org.bukkit.configuration.*;
import org.bukkit.configuration.file.*;
public class BouncySpongePlayerListener implements Listener{
public BouncySponge plugin;
/**
* Constructor for BouncySpongePlayerListener
* @param instance The BouncySponge plugin instance
*/
public BouncySpongePlayerListener(BouncySponge instance) {
plugin = instance;
}
/**
* Called when a player moves.
* If a player moves onto a sponge he is catapulted up into the air!
* @param ev A PlayerMoveEvent object
*/
@EventHandler
public void onPlayerMove(PlayerMoveEvent ev){
if (!ev.getFrom().getBlock().getLocation().equals(ev.getTo().getBlock().getLocation())) {
Player player = ev.getPlayer();
- if (player.isOp() || player.hasPermission("bouncysponge.jump")) {
+ if (player.hasPermission("bouncysponge.jump")) {
Block block = player.getLocation().getBlock().getRelative(BlockFace.DOWN);
if (block.getType() == Material.SPONGE) {
if (player.isSneaking()){
}
else{
Vector dir = player.getLocation().getDirection().multiply(1.75);
Vector vec = new Vector(dir.getX(), plugin.getConfig().getDouble("launch"), dir.getZ());
player.setVelocity(vec);
player.setNoDamageTicks(400);
}
}
}
}
}
}
| true | true | public void onPlayerMove(PlayerMoveEvent ev){
if (!ev.getFrom().getBlock().getLocation().equals(ev.getTo().getBlock().getLocation())) {
Player player = ev.getPlayer();
if (player.isOp() || player.hasPermission("bouncysponge.jump")) {
Block block = player.getLocation().getBlock().getRelative(BlockFace.DOWN);
if (block.getType() == Material.SPONGE) {
if (player.isSneaking()){
}
else{
Vector dir = player.getLocation().getDirection().multiply(1.75);
Vector vec = new Vector(dir.getX(), plugin.getConfig().getDouble("launch"), dir.getZ());
player.setVelocity(vec);
player.setNoDamageTicks(400);
}
}
}
}
}
| public void onPlayerMove(PlayerMoveEvent ev){
if (!ev.getFrom().getBlock().getLocation().equals(ev.getTo().getBlock().getLocation())) {
Player player = ev.getPlayer();
if (player.hasPermission("bouncysponge.jump")) {
Block block = player.getLocation().getBlock().getRelative(BlockFace.DOWN);
if (block.getType() == Material.SPONGE) {
if (player.isSneaking()){
}
else{
Vector dir = player.getLocation().getDirection().multiply(1.75);
Vector vec = new Vector(dir.getX(), plugin.getConfig().getDouble("launch"), dir.getZ());
player.setVelocity(vec);
player.setNoDamageTicks(400);
}
}
}
}
}
|
diff --git a/src/infinity/resource/are/AreResource.java b/src/infinity/resource/are/AreResource.java
index 779c604..ab7c09a 100644
--- a/src/infinity/resource/are/AreResource.java
+++ b/src/infinity/resource/are/AreResource.java
@@ -1,572 +1,573 @@
// Near Infinity - An Infinity Engine Browser and Editor
// Copyright (C) 2001 - 2005 Jon Olav Hauglid
// See LICENSE.txt for license information
package infinity.resource.are;
import infinity.datatype.*;
import infinity.resource.*;
import infinity.resource.key.ResourceEntry;
import infinity.resource.vertex.Vertex;
import infinity.util.Byteconvert;
import javax.swing.*;
import java.io.IOException;
import java.io.OutputStream;
import java.util.Set;
public final class AreResource extends AbstractStruct implements Resource, HasAddRemovable, HasDetailViewer
{
private static final String s_flag[] = {"No flags set", "Outdoor", "Day/Night",
"Weather", "City", "Forest", "Dungeon",
"Extended night", "Can rest"};
private static final String s_flag_torment[] = {"Indoors", "Hive", "", "Clerk's ward", "Lower ward",
"Ravel's maze", "Baator", "Rubikon",
"Negative material plane", "Curst", "Carceri",
"Allow day/night"};
private static final String s_atype[] = {"Normal", "Can't save game", "Tutorial area", "Dead magic zone",
"Dream area"};
private static final String s_atype_torment[] = {"Can rest", "Cannot save",
"Cannot rest", "Cannot save", "Too dangerous to rest",
"Cannot save", "Can rest with permission"};
private static final String s_atype_iwd2[] = {"Normal", "Can't save game", "Cannot rest", "Lock battle music"};
private static final String s_edge[] = {"No flags set", "Party required", "Party enabled"};
public static void addScriptNames(Set<String> scriptNames, byte buffer[])
{
int offset = 0;
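// V9.1 (IWD2) headers carry 16 extra bytes, shifting every section offset below.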
if (new String(buffer, 4, 4).equalsIgnoreCase("V9.1"))
offset = 16;
// Actors
if (ResourceFactory.getGameID() == ResourceFactory.ID_ICEWIND ||
ResourceFactory.getGameID() == ResourceFactory.ID_ICEWINDHOW ||
ResourceFactory.getGameID() == ResourceFactory.ID_ICEWINDHOWTOT ||
ResourceFactory.getGameID() == ResourceFactory.ID_ICEWIND2)
addScriptNames(scriptNames, buffer, Byteconvert.convertInt(buffer, offset + 84),
(int)Byteconvert.convertShort(buffer, offset + 88), 272);
// ITEPoints
addScriptNames(scriptNames, buffer, Byteconvert.convertInt(buffer, offset + 92),
(int)Byteconvert.convertShort(buffer, offset + 90), 196);
// Spawnpoints
addScriptNames(scriptNames, buffer, Byteconvert.convertInt(buffer, offset + 96),
Byteconvert.convertInt(buffer, offset + 100), 200);
// Entrances
// addScriptNames(scriptNames, buffer, Byteconvert.convertInt(buffer, offset + 104),
// Byteconvert.convertInt(buffer, offset + 108), 104);
// Containers
addScriptNames(scriptNames, buffer, Byteconvert.convertInt(buffer, offset + 112),
(int)Byteconvert.convertShort(buffer, offset + 116), 192);
// Ambients
addScriptNames(scriptNames, buffer, Byteconvert.convertInt(buffer, offset + 132),
(int)Byteconvert.convertShort(buffer, offset + 130), 212);
// Variables
// addScriptNames(scriptNames, buffer, Byteconvert.convertInt(buffer, offset + 136),
// Byteconvert.convertInt(buffer, offset + 140), 84);
// Doors
addScriptNames(scriptNames, buffer, Byteconvert.convertInt(buffer, offset + 168),
Byteconvert.convertInt(buffer, offset + 164), 200);
// Animations
addScriptNames(scriptNames, buffer, Byteconvert.convertInt(buffer, offset + 176),
Byteconvert.convertInt(buffer, offset + 172), 76);
// Tiled objects
addScriptNames(scriptNames, buffer, Byteconvert.convertInt(buffer, offset + 184),
Byteconvert.convertInt(buffer, offset + 180), 108);
// Rest spawn
// addScriptNames(scriptNames, buffer, Byteconvert.convertInt(buffer, offset + 192), 1, 228);
}
private static void addScriptNames(Set<String> scriptNames, byte buffer[], int offset, int count, int size)
{
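// Each structure begins with a 32-byte, NUL-padded script name;
// normalize it by dropping spaces and lower-casing.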
for (int i = 0; i < count; i++) {
StringBuilder sb = new StringBuilder(32);
for (int j = 0; j < 32; j++) {
byte b = buffer[offset + i * size + j];
if (b == 0x00)
break;
else if (b != 0x20) // Space
sb.append(Character.toLowerCase((char)b));
}
scriptNames.add(sb.toString());
}
}
public AreResource(ResourceEntry entry) throws Exception
{
super(entry);
}
// --------------------- Begin Interface HasAddRemovable ---------------------
public AddRemovable[] getAddRemovables() throws Exception
{
if (ResourceFactory.getGameID() == ResourceFactory.ID_TORMENT)
return new AddRemovable[]{new Actor(), new ITEPoint(), new SpawnPoint(),
new Entrance(), new Container(), new Ambient(),
new Variable(), new Door(), new Animation(),
new TiledObject(), new AutomapNotePST()};
else if (ResourceFactory.getGameID() == ResourceFactory.ID_BG2 ||
ResourceFactory.getGameID() == ResourceFactory.ID_BG2TOB ||
ResourceFactory.getGameID() == ResourceFactory.ID_TUTU ||
ResourceFactory.getGameID() == ResourceFactory.ID_BGEE)
return new AddRemovable[]{new Actor(), new ITEPoint(), new SpawnPoint(),
new Entrance(), new Container(), new Ambient(),
new Variable(), new Door(), new Animation(),
new TiledObject(), new AutomapNote(),
new ProTrap()};
else
return new AddRemovable[]{new Actor(), new ITEPoint(), new SpawnPoint(),
new Entrance(), new Container(), new Ambient(),
new Variable(), new Door(), new Animation(),
new TiledObject()};
}
// --------------------- End Interface HasAddRemovable ---------------------
// --------------------- Begin Interface HasDetailViewer ---------------------
public JComponent getDetailViewer()
{
JScrollPane scroll = new JScrollPane(new Viewer(this));
scroll.setBorder(BorderFactory.createEmptyBorder());
return scroll;
}
// --------------------- End Interface HasDetailViewer ---------------------
// --------------------- Begin Interface Writeable ---------------------
public void write(OutputStream os) throws IOException
{
super.writeFlatList(os);
}
// --------------------- End Interface Writeable ---------------------
protected void datatypeAdded(AddRemovable datatype)
{
HexNumber offset_vertices = (HexNumber)getAttribute("Vertices offset");
if (datatype.getOffset() <= offset_vertices.getValue())
offset_vertices.incValue(datatype.getSize());
HexNumber offset_items = (HexNumber)getAttribute("Items offset");
if (datatype.getOffset() <= offset_items.getValue())
offset_items.incValue(datatype.getSize());
if (datatype instanceof HasVertices)
updateVertices();
if (datatype instanceof Container)
updateItems();
updateActorCREOffsets();
}
protected void datatypeAddedInChild(AbstractStruct child, AddRemovable datatype)
{
if (datatype instanceof Vertex)
updateVertices();
else {
HexNumber offset_vertices = (HexNumber)getAttribute("Vertices offset");
if (datatype.getOffset() <= offset_vertices.getValue()) {
offset_vertices.incValue(datatype.getSize());
updateVertices();
}
}
if (datatype instanceof Item)
updateItems();
else {
HexNumber offset_items = (HexNumber)getAttribute("Items offset");
if (datatype.getOffset() <= offset_items.getValue()) {
offset_items.incValue(datatype.getSize());
updateItems();
}
}
updateActorCREOffsets();
}
protected void datatypeRemoved(AddRemovable datatype)
{
HexNumber offset_vertices = (HexNumber)getAttribute("Vertices offset");
if (datatype.getOffset() < offset_vertices.getValue())
offset_vertices.incValue(-datatype.getSize());
HexNumber offset_items = (HexNumber)getAttribute("Items offset");
if (datatype.getOffset() < offset_items.getValue())
offset_items.incValue(-datatype.getSize());
if (datatype instanceof HasVertices)
updateVertices();
if (datatype instanceof Container)
updateItems();
updateActorCREOffsets();
}
protected void datatypeRemovedInChild(AbstractStruct child, AddRemovable datatype)
{
if (datatype instanceof Vertex)
updateVertices();
else {
HexNumber offset_vertices = (HexNumber)getAttribute("Vertices offset");
if (datatype.getOffset() < offset_vertices.getValue()) {
offset_vertices.incValue(-datatype.getSize());
updateVertices();
}
}
if (datatype instanceof Item)
updateItems();
else {
HexNumber offset_items = (HexNumber)getAttribute("Items offset");
if (datatype.getOffset() < offset_items.getValue()) {
offset_items.incValue(-datatype.getSize());
updateItems();
}
}
updateActorCREOffsets();
}
protected int read(byte buffer[], int offset) throws Exception
{
list.add(new TextString(buffer, offset, 4, "Signature"));
TextString version = new TextString(buffer, offset + 4, 4, "Version");
list.add(version);
list.add(new ResourceRef(buffer, offset + 8, "WED resource", "WED"));
list.add(new DecNumber(buffer, offset + 16, 4, "Last saved"));
if (version.toString().equalsIgnoreCase("V9.1"))
list.add(new Flag(buffer, offset + 20, 4, "Area type", s_atype_iwd2));
else if (ResourceFactory.getGameID() == ResourceFactory.ID_TORMENT)
list.add(new Bitmap(buffer, offset + 20, 4, "Area type", s_atype_torment));
else
list.add(new Flag(buffer, offset + 20, 4, "Area type", s_atype));
list.add(new ResourceRef(buffer, offset + 24, "Area north", "ARE"));
list.add(new Flag(buffer, offset + 32, 4, "Edge flags north", s_edge));
list.add(new ResourceRef(buffer, offset + 36, "Area east", "ARE"));
list.add(new Flag(buffer, offset + 44, 4, "Edge flags east", s_edge));
list.add(new ResourceRef(buffer, offset + 48, "Area south", "ARE"));
list.add(new Flag(buffer, offset + 56, 4, "Edge flags south", s_edge));
list.add(new ResourceRef(buffer, offset + 60, "Area west", "ARE"));
list.add(new Flag(buffer, offset + 68, 4, "Edge flags west", s_edge));
if (ResourceFactory.getGameID() == ResourceFactory.ID_TORMENT)
list.add(new Flag(buffer, offset + 72, 2, "Location", s_flag_torment));
else
list.add(new Flag(buffer, offset + 72, 2, "Location", s_flag));
list.add(new DecNumber(buffer, offset + 74, 2, "Rain probability"));
list.add(new DecNumber(buffer, offset + 76, 2, "Snow probability"));
list.add(new DecNumber(buffer, offset + 78, 2, "Fog probability"));
list.add(new DecNumber(buffer, offset + 80, 2, "Lightning probability"));
list.add(new DecNumber(buffer, offset + 82, 2, "Wind speed"));
if (version.toString().equalsIgnoreCase("V9.1")) {
list.add(new DecNumber(buffer, offset + 84, 1, "Area difficulty 2"));
list.add(new DecNumber(buffer, offset + 85, 1, "Area difficulty 3"));
list.add(new Unknown(buffer, offset + 86, 14));
offset += 16;
}
SectionOffset offset_actors = new SectionOffset(buffer, offset + 84, "Actors offset",
Actor.class);
list.add(offset_actors);
SectionCount count_actors = new SectionCount(buffer, offset + 88, 2, "# actors",
Actor.class);
list.add(count_actors);
SectionCount count_itepoints = new SectionCount(buffer, offset + 90, 2, "# triggers",
ITEPoint.class);
list.add(count_itepoints);
SectionOffset offset_itepoints = new SectionOffset(buffer, offset + 92,
"Triggers offset",
ITEPoint.class);
list.add(offset_itepoints);
SectionOffset offset_spoints = new SectionOffset(buffer, offset + 96, "Spawn points offset",
SpawnPoint.class);
list.add(offset_spoints);
SectionCount count_spoints = new SectionCount(buffer, offset + 100, 4, "# spawn points",
SpawnPoint.class);
list.add(count_spoints);
SectionOffset offset_entrances = new SectionOffset(buffer, offset + 104, "Entrances offset",
Entrance.class);
list.add(offset_entrances);
SectionCount count_entrances = new SectionCount(buffer, offset + 108, 4, "# entrances",
Entrance.class);
list.add(count_entrances);
SectionOffset offset_containers = new SectionOffset(buffer, offset + 112, "Containers offset",
Container.class);
list.add(offset_containers);
SectionCount count_containers = new SectionCount(buffer, offset + 116, 2, "# containers",
Container.class);
list.add(count_containers);
DecNumber count_items = new DecNumber(buffer, offset + 118, 2, "# items");
list.add(count_items);
HexNumber offset_items = new HexNumber(buffer, offset + 120, 4, "Items offset");
list.add(offset_items);
HexNumber offset_vertices = new HexNumber(buffer, offset + 124, 4, "Vertices offset");
list.add(offset_vertices);
DecNumber count_vertices = new DecNumber(buffer, offset + 128, 2, "# vertices");
list.add(count_vertices);
SectionCount count_ambients = new SectionCount(buffer, offset + 130, 2, "# ambients",
Ambient.class);
list.add(count_ambients);
SectionOffset offset_ambients = new SectionOffset(buffer, offset + 132, "Ambients offset",
Ambient.class);
list.add(offset_ambients);
SectionOffset offset_variables = new SectionOffset(buffer, offset + 136, "Variables offset",
Variable.class);
list.add(offset_variables);
SectionCount count_variables = new SectionCount(buffer, offset + 140, 2, "# variables",
Variable.class);
list.add(count_variables);
list.add(new HexNumber(buffer, offset + 142, 2, "# object flags"));
list.add(new HexNumber(buffer, offset + 144, 4, "Object flags offset"));
list.add(new ResourceRef(buffer, offset + 148, "Area script", "BCS"));
SectionCount size_exploredbitmap = new SectionCount(buffer, offset + 156, 4, "Explored bitmap size",
Unknown.class);
list.add(size_exploredbitmap);
SectionOffset offset_exploredbitmap = new SectionOffset(buffer, offset + 160, "Explored bitmap offset",
Unknown.class);
list.add(offset_exploredbitmap);
SectionCount count_doors = new SectionCount(buffer, offset + 164, 4, "# doors",
Door.class);
list.add(count_doors);
SectionOffset offset_doors = new SectionOffset(buffer, offset + 168, "Doors offset",
Door.class);
list.add(offset_doors);
SectionCount count_animations = new SectionCount(buffer, offset + 172, 4, "# animations",
Animation.class);
list.add(count_animations);
SectionOffset offset_animations = new SectionOffset(buffer, offset + 176, "Animations offset",
Animation.class);
list.add(offset_animations);
SectionCount count_tiledobjects = new SectionCount(buffer, offset + 180, 4, "# tiled objects",
TiledObject.class);
list.add(count_tiledobjects);
SectionOffset offset_tiledobjects = new SectionOffset(buffer, offset + 184, "Tiled objects offset",
TiledObject.class);
list.add(offset_tiledobjects);
SectionOffset offset_songs = new SectionOffset(buffer, offset + 188, "Songs offset",
Song.class);
list.add(offset_songs);
SectionOffset offset_rest = new SectionOffset(buffer, offset + 192, "Rest encounters offset",
RestSpawn.class);
list.add(offset_rest);
SectionOffset offset_automapnote = null, offset_protrap = null;
SectionCount count_automapnote = null, count_protrap = null;
if (ResourceFactory.getGameID() == ResourceFactory.ID_TORMENT) {
list.add(new Unknown(buffer, offset + 196, 4));
offset_automapnote = new SectionOffset(buffer, offset + 200, "Automap notes offset",
AutomapNotePST.class);
list.add(offset_automapnote);
count_automapnote = new SectionCount(buffer, offset + 204, 4, "# automap notes",
AutomapNotePST.class);
list.add(count_automapnote);
list.add(new Unknown(buffer, offset + 208, 76));
}
else if (ResourceFactory.getGameID() == ResourceFactory.ID_BG2 ||
ResourceFactory.getGameID() == ResourceFactory.ID_BG2TOB ||
- ResourceFactory.getGameID() == ResourceFactory.ID_TUTU) {
+ ResourceFactory.getGameID() == ResourceFactory.ID_TUTU ||
+ ResourceFactory.getGameID() == ResourceFactory.ID_BGEE) {
offset_automapnote = new SectionOffset(buffer, offset + 196, "Automap notes offset",
AutomapNote.class);
list.add(offset_automapnote);
count_automapnote = new SectionCount(buffer, offset + 200, 4, "# automap notes",
AutomapNote.class);
list.add(count_automapnote);
offset_protrap = new SectionOffset(buffer, offset + 204, "Projectile traps offset",
ProTrap.class);
list.add(offset_protrap);
count_protrap = new SectionCount(buffer, offset + 208, 4, "# projectile traps",
ProTrap.class);
list.add(count_protrap);
list.add(new ResourceRef(buffer, offset + 212, "Rest movie (day)", "MVE"));
list.add(new ResourceRef(buffer, offset + 220, "Rest movie (night)", "MVE"));
list.add(new Unknown(buffer, offset + 228, 56));
}
else if (ResourceFactory.getGameID() == ResourceFactory.ID_ICEWIND2) {
offset_automapnote = new SectionOffset(buffer, offset + 196, "Automap notes offset",
AutomapNote.class);
list.add(offset_automapnote);
count_automapnote = new SectionCount(buffer, offset + 200, 4, "# automap notes",
AutomapNote.class);
list.add(count_automapnote);
list.add(new Unknown(buffer, offset + 204, 80));
}
else
list.add(new Unknown(buffer, offset + 196, 88));
offset = offset_actors.getValue();
for (int i = 0; i < count_actors.getValue(); i++) {
Actor actor = new Actor(this, buffer, offset, i + 1);
offset = actor.getEndOffset();
list.add(actor);
}
offset = offset_itepoints.getValue();
for (int i = 0; i < count_itepoints.getValue(); i++) {
ITEPoint ite = new ITEPoint(this, buffer, offset);
offset = ite.getEndOffset();
list.add(ite);
}
offset = offset_spoints.getValue();
for (int i = 0; i < count_spoints.getValue(); i++) {
SpawnPoint sp = new SpawnPoint(this, buffer, offset);
offset = sp.getEndOffset();
list.add(sp);
}
offset = offset_entrances.getValue();
for (int i = 0; i < count_entrances.getValue(); i++) {
Entrance ent = new Entrance(this, buffer, offset);
offset = ent.getEndOffset();
list.add(ent);
}
offset = offset_containers.getValue();
for (int i = 0; i < count_containers.getValue(); i++) {
Container con = new Container(this, buffer, offset, i + 1);
offset = con.getEndOffset();
list.add(con);
}
offset = offset_ambients.getValue();
for (int i = 0; i < count_ambients.getValue(); i++) {
Ambient ambi = new Ambient(this, buffer, offset, i + 1);
offset = ambi.getEndOffset();
list.add(ambi);
}
offset = offset_variables.getValue();
for (int i = 0; i < count_variables.getValue(); i++) {
Variable var = new Variable(this, buffer, offset);
offset = var.getEndOffset();
list.add(var);
}
offset = offset_exploredbitmap.getValue();
if (size_exploredbitmap.getValue() > 0)
list.add(new Unknown(buffer, offset, size_exploredbitmap.getValue(), "Explored bitmap"));
offset = offset_doors.getValue();
for (int i = 0; i < count_doors.getValue(); i++) {
Door door = new Door(this, buffer, offset, i + 1);
offset = door.getEndOffset();
list.add(door);
}
offset = offset_animations.getValue();
for (int i = 0; i < count_animations.getValue(); i++) {
Animation anim = new Animation(this, buffer, offset);
offset = anim.getEndOffset();
list.add(anim);
}
offset = offset_tiledobjects.getValue();
for (int i = 0; i < count_tiledobjects.getValue(); i++) {
TiledObject tile = new TiledObject(this, buffer, offset);
offset = tile.getEndOffset();
list.add(tile);
}
if (offset_automapnote != null) { // Torment, BG2
offset = offset_automapnote.getValue();
if (ResourceFactory.getGameID() == ResourceFactory.ID_TORMENT) {
for (int i = 0; i < count_automapnote.getValue(); i++) {
AutomapNotePST note = new AutomapNotePST(this, buffer, offset);
offset = note.getEndOffset();
list.add(note);
}
}
else {
for (int i = 0; i < count_automapnote.getValue(); i++) {
AutomapNote note = new AutomapNote(this, buffer, offset);
offset = note.getEndOffset();
list.add(note);
}
}
}
if (offset_protrap != null) { // BG2
offset = offset_protrap.getValue();
for (int i = 0; i < count_protrap.getValue(); i++) {
ProTrap trap = new ProTrap(this, buffer, offset);
offset = trap.getEndOffset();
list.add(trap);
}
}
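// Items and vertices are stored in shared pools; now that every owner
// exists, let each container and vertex holder read its slice.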
offset = offset_items.getValue();
for (int i = 0; i < list.size(); i++) {
Object o = list.get(i);
if (o instanceof Container)
((Container)o).readItems(buffer, offset);
}
offset = offset_vertices.getValue();
for (int i = 0; i < list.size(); i++) {
Object o = list.get(i);
if (o instanceof HasVertices)
((HasVertices)o).readVertices(buffer, offset);
}
if (offset_songs.getValue() > 0)
list.add(new Song(this, buffer, offset_songs.getValue()));
if (offset_rest.getValue() > 0)
list.add(new RestSpawn(this, buffer, offset_rest.getValue()));
int endoffset = offset;
for (int i = 0; i < list.size(); i++) {
StructEntry entry = list.get(i);
if (entry.getOffset() + entry.getSize() > endoffset)
endoffset = entry.getOffset() + entry.getSize();
}
return endoffset;
}
private void updateActorCREOffsets()
{
for (int i = 0; i < list.size(); i++) {
Object o = list.get(i);
if (o instanceof Actor)
((Actor)o).updateCREOffset();
}
}
private void updateItems()
{
// Assumes items offset is correct
int offset = ((HexNumber)getAttribute("Items offset")).getValue();
int count = 0;
for (int i = 0; i < list.size(); i++) {
Object o = list.get(i);
if (o instanceof Container) {
Container container = (Container)o;
int itemNum = container.updateItems(offset, count);
offset += 20 * itemNum;
count += itemNum;
}
}
((DecNumber)getAttribute("# items")).setValue(count);
}
private void updateVertices()
{
// Assumes vertices offset is correct
int offset = ((HexNumber)getAttribute("Vertices offset")).getValue();
int count = 0;
for (int i = 0; i < list.size(); i++) {
Object o = list.get(i);
if (o instanceof HasVertices) {
HasVertices vert = (HasVertices)o;
int vertNum = vert.updateVertices(offset, count);
offset += 4 * vertNum;
count += vertNum;
}
}
((DecNumber)getAttribute("# vertices")).setValue(count);
}
}
| true | true | protected int read(byte buffer[], int offset) throws Exception
{
list.add(new TextString(buffer, offset, 4, "Signature"));
TextString version = new TextString(buffer, offset + 4, 4, "Version");
list.add(version);
list.add(new ResourceRef(buffer, offset + 8, "WED resource", "WED"));
list.add(new DecNumber(buffer, offset + 16, 4, "Last saved"));
if (version.toString().equalsIgnoreCase("V9.1"))
list.add(new Flag(buffer, offset + 20, 4, "Area type", s_atype_iwd2));
else if (ResourceFactory.getGameID() == ResourceFactory.ID_TORMENT)
list.add(new Bitmap(buffer, offset + 20, 4, "Area type", s_atype_torment));
else
list.add(new Flag(buffer, offset + 20, 4, "Area type", s_atype));
list.add(new ResourceRef(buffer, offset + 24, "Area north", "ARE"));
list.add(new Flag(buffer, offset + 32, 4, "Edge flags north", s_edge));
list.add(new ResourceRef(buffer, offset + 36, "Area east", "ARE"));
list.add(new Flag(buffer, offset + 44, 4, "Edge flags east", s_edge));
list.add(new ResourceRef(buffer, offset + 48, "Area south", "ARE"));
list.add(new Flag(buffer, offset + 56, 4, "Edge flags south", s_edge));
list.add(new ResourceRef(buffer, offset + 60, "Area west", "ARE"));
list.add(new Flag(buffer, offset + 68, 4, "Edge flags west", s_edge));
if (ResourceFactory.getGameID() == ResourceFactory.ID_TORMENT)
list.add(new Flag(buffer, offset + 72, 2, "Location", s_flag_torment));
else
list.add(new Flag(buffer, offset + 72, 2, "Location", s_flag));
list.add(new DecNumber(buffer, offset + 74, 2, "Rain probability"));
list.add(new DecNumber(buffer, offset + 76, 2, "Snow probability"));
list.add(new DecNumber(buffer, offset + 78, 2, "Fog probability"));
list.add(new DecNumber(buffer, offset + 80, 2, "Lightning probability"));
list.add(new DecNumber(buffer, offset + 82, 2, "Wind speed"));
if (version.toString().equalsIgnoreCase("V9.1")) {
list.add(new DecNumber(buffer, offset + 84, 1, "Area difficulty 2"));
list.add(new DecNumber(buffer, offset + 85, 1, "Area difficulty 3"));
list.add(new Unknown(buffer, offset + 86, 14));
offset += 16;
}
SectionOffset offset_actors = new SectionOffset(buffer, offset + 84, "Actors offset",
Actor.class);
list.add(offset_actors);
SectionCount count_actors = new SectionCount(buffer, offset + 88, 2, "# actors",
Actor.class);
list.add(count_actors);
SectionCount count_itepoints = new SectionCount(buffer, offset + 90, 2, "# triggers",
ITEPoint.class);
list.add(count_itepoints);
SectionOffset offset_itepoints = new SectionOffset(buffer, offset + 92,
"Triggers offset",
ITEPoint.class);
list.add(offset_itepoints);
SectionOffset offset_spoints = new SectionOffset(buffer, offset + 96, "Spawn points offset",
SpawnPoint.class);
list.add(offset_spoints);
SectionCount count_spoints = new SectionCount(buffer, offset + 100, 4, "# spawn points",
SpawnPoint.class);
list.add(count_spoints);
SectionOffset offset_entrances = new SectionOffset(buffer, offset + 104, "Entrances offset",
Entrance.class);
list.add(offset_entrances);
SectionCount count_entrances = new SectionCount(buffer, offset + 108, 4, "# entrances",
Entrance.class);
list.add(count_entrances);
SectionOffset offset_containers = new SectionOffset(buffer, offset + 112, "Containers offset",
Container.class);
list.add(offset_containers);
SectionCount count_containers = new SectionCount(buffer, offset + 116, 2, "# containers",
Container.class);
list.add(count_containers);
DecNumber count_items = new DecNumber(buffer, offset + 118, 2, "# items");
list.add(count_items);
HexNumber offset_items = new HexNumber(buffer, offset + 120, 4, "Items offset");
list.add(offset_items);
HexNumber offset_vertices = new HexNumber(buffer, offset + 124, 4, "Vertices offset");
list.add(offset_vertices);
DecNumber count_vertices = new DecNumber(buffer, offset + 128, 2, "# vertices");
list.add(count_vertices);
SectionCount count_ambients = new SectionCount(buffer, offset + 130, 2, "# ambients",
Ambient.class);
list.add(count_ambients);
SectionOffset offset_ambients = new SectionOffset(buffer, offset + 132, "Ambients offset",
Ambient.class);
list.add(offset_ambients);
SectionOffset offset_variables = new SectionOffset(buffer, offset + 136, "Variables offset",
Variable.class);
list.add(offset_variables);
SectionCount count_variables = new SectionCount(buffer, offset + 140, 2, "# variables",
Variable.class);
list.add(count_variables);
list.add(new HexNumber(buffer, offset + 142, 2, "# object flags"));
list.add(new HexNumber(buffer, offset + 144, 4, "Object flags offset"));
list.add(new ResourceRef(buffer, offset + 148, "Area script", "BCS"));
SectionCount size_exploredbitmap = new SectionCount(buffer, offset + 156, 4, "Explored bitmap size",
Unknown.class);
list.add(size_exploredbitmap);
SectionOffset offset_exploredbitmap = new SectionOffset(buffer, offset + 160, "Explored bitmap offset",
Unknown.class);
list.add(offset_exploredbitmap);
SectionCount count_doors = new SectionCount(buffer, offset + 164, 4, "# doors",
Door.class);
list.add(count_doors);
SectionOffset offset_doors = new SectionOffset(buffer, offset + 168, "Doors offset",
Door.class);
list.add(offset_doors);
SectionCount count_animations = new SectionCount(buffer, offset + 172, 4, "# animations",
Animation.class);
list.add(count_animations);
SectionOffset offset_animations = new SectionOffset(buffer, offset + 176, "Animations offset",
Animation.class);
list.add(offset_animations);
SectionCount count_tiledobjects = new SectionCount(buffer, offset + 180, 4, "# tiled objects",
TiledObject.class);
list.add(count_tiledobjects);
SectionOffset offset_tiledobjects = new SectionOffset(buffer, offset + 184, "Tiled objects offset",
TiledObject.class);
list.add(offset_tiledobjects);
SectionOffset offset_songs = new SectionOffset(buffer, offset + 188, "Songs offset",
Song.class);
list.add(offset_songs);
SectionOffset offset_rest = new SectionOffset(buffer, offset + 192, "Rest encounters offset",
RestSpawn.class);
list.add(offset_rest);
SectionOffset offset_automapnote = null, offset_protrap = null;
SectionCount count_automapnote = null, count_protrap = null;
if (ResourceFactory.getGameID() == ResourceFactory.ID_TORMENT) {
list.add(new Unknown(buffer, offset + 196, 4));
offset_automapnote = new SectionOffset(buffer, offset + 200, "Automap notes offset",
AutomapNotePST.class);
list.add(offset_automapnote);
count_automapnote = new SectionCount(buffer, offset + 204, 4, "# automap notes",
AutomapNotePST.class);
list.add(count_automapnote);
list.add(new Unknown(buffer, offset + 208, 76));
}
else if (ResourceFactory.getGameID() == ResourceFactory.ID_BG2 ||
ResourceFactory.getGameID() == ResourceFactory.ID_BG2TOB ||
ResourceFactory.getGameID() == ResourceFactory.ID_TUTU) {
offset_automapnote = new SectionOffset(buffer, offset + 196, "Automap notes offset",
AutomapNote.class);
list.add(offset_automapnote);
count_automapnote = new SectionCount(buffer, offset + 200, 4, "# automap notes",
AutomapNote.class);
list.add(count_automapnote);
offset_protrap = new SectionOffset(buffer, offset + 204, "Projectile traps offset",
ProTrap.class);
list.add(offset_protrap);
count_protrap = new SectionCount(buffer, offset + 208, 4, "# projectile traps",
ProTrap.class);
list.add(count_protrap);
list.add(new ResourceRef(buffer, offset + 212, "Rest movie (day)", "MVE"));
list.add(new ResourceRef(buffer, offset + 220, "Rest movie (night)", "MVE"));
list.add(new Unknown(buffer, offset + 228, 56));
}
else if (ResourceFactory.getGameID() == ResourceFactory.ID_ICEWIND2) {
offset_automapnote = new SectionOffset(buffer, offset + 196, "Automap notes offset",
AutomapNote.class);
list.add(offset_automapnote);
count_automapnote = new SectionCount(buffer, offset + 200, 4, "# automap notes",
AutomapNote.class);
list.add(count_automapnote);
list.add(new Unknown(buffer, offset + 204, 80));
}
else
list.add(new Unknown(buffer, offset + 196, 88));
offset = offset_actors.getValue();
for (int i = 0; i < count_actors.getValue(); i++) {
Actor actor = new Actor(this, buffer, offset, i + 1);
offset = actor.getEndOffset();
list.add(actor);
}
offset = offset_itepoints.getValue();
for (int i = 0; i < count_itepoints.getValue(); i++) {
ITEPoint ite = new ITEPoint(this, buffer, offset);
offset = ite.getEndOffset();
list.add(ite);
}
offset = offset_spoints.getValue();
for (int i = 0; i < count_spoints.getValue(); i++) {
SpawnPoint sp = new SpawnPoint(this, buffer, offset);
offset = sp.getEndOffset();
list.add(sp);
}
offset = offset_entrances.getValue();
for (int i = 0; i < count_entrances.getValue(); i++) {
Entrance ent = new Entrance(this, buffer, offset);
offset = ent.getEndOffset();
list.add(ent);
}
offset = offset_containers.getValue();
for (int i = 0; i < count_containers.getValue(); i++) {
Container con = new Container(this, buffer, offset, i + 1);
offset = con.getEndOffset();
list.add(con);
}
offset = offset_ambients.getValue();
for (int i = 0; i < count_ambients.getValue(); i++) {
Ambient ambi = new Ambient(this, buffer, offset, i + 1);
offset = ambi.getEndOffset();
list.add(ambi);
}
offset = offset_variables.getValue();
for (int i = 0; i < count_variables.getValue(); i++) {
Variable var = new Variable(this, buffer, offset);
offset = var.getEndOffset();
list.add(var);
}
offset = offset_exploredbitmap.getValue();
if (size_exploredbitmap.getValue() > 0)
list.add(new Unknown(buffer, offset, size_exploredbitmap.getValue(), "Explored bitmap"));
offset = offset_doors.getValue();
for (int i = 0; i < count_doors.getValue(); i++) {
Door door = new Door(this, buffer, offset, i + 1);
offset = door.getEndOffset();
list.add(door);
}
offset = offset_animations.getValue();
for (int i = 0; i < count_animations.getValue(); i++) {
Animation anim = new Animation(this, buffer, offset);
offset = anim.getEndOffset();
list.add(anim);
}
offset = offset_tiledobjects.getValue();
for (int i = 0; i < count_tiledobjects.getValue(); i++) {
TiledObject tile = new TiledObject(this, buffer, offset);
offset = tile.getEndOffset();
list.add(tile);
}
if (offset_automapnote != null) { // Torment, BG2
offset = offset_automapnote.getValue();
if (ResourceFactory.getGameID() == ResourceFactory.ID_TORMENT) {
for (int i = 0; i < count_automapnote.getValue(); i++) {
AutomapNotePST note = new AutomapNotePST(this, buffer, offset);
offset = note.getEndOffset();
list.add(note);
}
}
else {
for (int i = 0; i < count_automapnote.getValue(); i++) {
AutomapNote note = new AutomapNote(this, buffer, offset);
offset = note.getEndOffset();
list.add(note);
}
}
}
if (offset_protrap != null) { // BG2
offset = offset_protrap.getValue();
for (int i = 0; i < count_protrap.getValue(); i++) {
ProTrap trap = new ProTrap(this, buffer, offset);
offset = trap.getEndOffset();
list.add(trap);
}
}
offset = offset_items.getValue();
for (int i = 0; i < list.size(); i++) {
Object o = list.get(i);
if (o instanceof Container)
((Container)o).readItems(buffer, offset);
}
offset = offset_vertices.getValue();
for (int i = 0; i < list.size(); i++) {
Object o = list.get(i);
if (o instanceof HasVertices)
((HasVertices)o).readVertices(buffer, offset);
}
if (offset_songs.getValue() > 0)
list.add(new Song(this, buffer, offset_songs.getValue()));
if (offset_rest.getValue() > 0)
list.add(new RestSpawn(this, buffer, offset_rest.getValue()));
int endoffset = offset;
for (int i = 0; i < list.size(); i++) {
StructEntry entry = list.get(i);
if (entry.getOffset() + entry.getSize() > endoffset)
endoffset = entry.getOffset() + entry.getSize();
}
return endoffset;
}
| protected int read(byte buffer[], int offset) throws Exception
{
list.add(new TextString(buffer, offset, 4, "Signature"));
TextString version = new TextString(buffer, offset + 4, 4, "Version");
list.add(version);
list.add(new ResourceRef(buffer, offset + 8, "WED resource", "WED"));
list.add(new DecNumber(buffer, offset + 16, 4, "Last saved"));
if (version.toString().equalsIgnoreCase("V9.1"))
list.add(new Flag(buffer, offset + 20, 4, "Area type", s_atype_iwd2));
else if (ResourceFactory.getGameID() == ResourceFactory.ID_TORMENT)
list.add(new Bitmap(buffer, offset + 20, 4, "Area type", s_atype_torment));
else
list.add(new Flag(buffer, offset + 20, 4, "Area type", s_atype));
list.add(new ResourceRef(buffer, offset + 24, "Area north", "ARE"));
list.add(new Flag(buffer, offset + 32, 4, "Edge flags north", s_edge));
list.add(new ResourceRef(buffer, offset + 36, "Area east", "ARE"));
list.add(new Flag(buffer, offset + 44, 4, "Edge flags east", s_edge));
list.add(new ResourceRef(buffer, offset + 48, "Area south", "ARE"));
list.add(new Flag(buffer, offset + 56, 4, "Edge flags south", s_edge));
list.add(new ResourceRef(buffer, offset + 60, "Area west", "ARE"));
list.add(new Flag(buffer, offset + 68, 4, "Edge flags west", s_edge));
if (ResourceFactory.getGameID() == ResourceFactory.ID_TORMENT)
list.add(new Flag(buffer, offset + 72, 2, "Location", s_flag_torment));
else
list.add(new Flag(buffer, offset + 72, 2, "Location", s_flag));
list.add(new DecNumber(buffer, offset + 74, 2, "Rain probability"));
list.add(new DecNumber(buffer, offset + 76, 2, "Snow probability"));
list.add(new DecNumber(buffer, offset + 78, 2, "Fog probability"));
list.add(new DecNumber(buffer, offset + 80, 2, "Lightning probability"));
list.add(new DecNumber(buffer, offset + 82, 2, "Wind speed"));
if (version.toString().equalsIgnoreCase("V9.1")) {
list.add(new DecNumber(buffer, offset + 84, 1, "Area difficulty 2"));
list.add(new DecNumber(buffer, offset + 85, 1, "Area difficulty 3"));
list.add(new Unknown(buffer, offset + 86, 14));
offset += 16;
}
SectionOffset offset_actors = new SectionOffset(buffer, offset + 84, "Actors offset",
Actor.class);
list.add(offset_actors);
SectionCount count_actors = new SectionCount(buffer, offset + 88, 2, "# actors",
Actor.class);
list.add(count_actors);
SectionCount count_itepoints = new SectionCount(buffer, offset + 90, 2, "# triggers",
ITEPoint.class);
list.add(count_itepoints);
SectionOffset offset_itepoints = new SectionOffset(buffer, offset + 92,
"Triggers offset",
ITEPoint.class);
list.add(offset_itepoints);
SectionOffset offset_spoints = new SectionOffset(buffer, offset + 96, "Spawn points offset",
SpawnPoint.class);
list.add(offset_spoints);
SectionCount count_spoints = new SectionCount(buffer, offset + 100, 4, "# spawn points",
SpawnPoint.class);
list.add(count_spoints);
SectionOffset offset_entrances = new SectionOffset(buffer, offset + 104, "Entrances offset",
Entrance.class);
list.add(offset_entrances);
SectionCount count_entrances = new SectionCount(buffer, offset + 108, 4, "# entrances",
Entrance.class);
list.add(count_entrances);
SectionOffset offset_containers = new SectionOffset(buffer, offset + 112, "Containers offset",
Container.class);
list.add(offset_containers);
SectionCount count_containers = new SectionCount(buffer, offset + 116, 2, "# containers",
Container.class);
list.add(count_containers);
DecNumber count_items = new DecNumber(buffer, offset + 118, 2, "# items");
list.add(count_items);
HexNumber offset_items = new HexNumber(buffer, offset + 120, 4, "Items offset");
list.add(offset_items);
HexNumber offset_vertices = new HexNumber(buffer, offset + 124, 4, "Vertices offset");
list.add(offset_vertices);
DecNumber count_vertices = new DecNumber(buffer, offset + 128, 2, "# vertices");
list.add(count_vertices);
SectionCount count_ambients = new SectionCount(buffer, offset + 130, 2, "# ambients",
Ambient.class);
list.add(count_ambients);
SectionOffset offset_ambients = new SectionOffset(buffer, offset + 132, "Ambients offset",
Ambient.class);
list.add(offset_ambients);
SectionOffset offset_variables = new SectionOffset(buffer, offset + 136, "Variables offset",
Variable.class);
list.add(offset_variables);
SectionCount count_variables = new SectionCount(buffer, offset + 140, 2, "# variables",
Variable.class);
list.add(count_variables);
list.add(new HexNumber(buffer, offset + 142, 2, "# object flags"));
list.add(new HexNumber(buffer, offset + 144, 4, "Object flags offset"));
list.add(new ResourceRef(buffer, offset + 148, "Area script", "BCS"));
SectionCount size_exploredbitmap = new SectionCount(buffer, offset + 156, 4, "Explored bitmap size",
Unknown.class);
list.add(size_exploredbitmap);
SectionOffset offset_exploredbitmap = new SectionOffset(buffer, offset + 160, "Explored bitmap offset",
Unknown.class);
list.add(offset_exploredbitmap);
SectionCount count_doors = new SectionCount(buffer, offset + 164, 4, "# doors",
Door.class);
list.add(count_doors);
SectionOffset offset_doors = new SectionOffset(buffer, offset + 168, "Doors offset",
Door.class);
list.add(offset_doors);
SectionCount count_animations = new SectionCount(buffer, offset + 172, 4, "# animations",
Animation.class);
list.add(count_animations);
SectionOffset offset_animations = new SectionOffset(buffer, offset + 176, "Animations offset",
Animation.class);
list.add(offset_animations);
SectionCount count_tiledobjects = new SectionCount(buffer, offset + 180, 4, "# tiled objects",
TiledObject.class);
list.add(count_tiledobjects);
SectionOffset offset_tiledobjects = new SectionOffset(buffer, offset + 184, "Tiled objects offset",
TiledObject.class);
list.add(offset_tiledobjects);
SectionOffset offset_songs = new SectionOffset(buffer, offset + 188, "Songs offset",
Song.class);
list.add(offset_songs);
SectionOffset offset_rest = new SectionOffset(buffer, offset + 192, "Rest encounters offset",
RestSpawn.class);
list.add(offset_rest);
SectionOffset offset_automapnote = null, offset_protrap = null;
SectionCount count_automapnote = null, count_protrap = null;
if (ResourceFactory.getGameID() == ResourceFactory.ID_TORMENT) {
list.add(new Unknown(buffer, offset + 196, 4));
offset_automapnote = new SectionOffset(buffer, offset + 200, "Automap notes offset",
AutomapNotePST.class);
list.add(offset_automapnote);
count_automapnote = new SectionCount(buffer, offset + 204, 4, "# automap notes",
AutomapNotePST.class);
list.add(count_automapnote);
list.add(new Unknown(buffer, offset + 208, 76));
}
else if (ResourceFactory.getGameID() == ResourceFactory.ID_BG2 ||
ResourceFactory.getGameID() == ResourceFactory.ID_BG2TOB ||
ResourceFactory.getGameID() == ResourceFactory.ID_TUTU ||
ResourceFactory.getGameID() == ResourceFactory.ID_BGEE) {
offset_automapnote = new SectionOffset(buffer, offset + 196, "Automap notes offset",
AutomapNote.class);
list.add(offset_automapnote);
count_automapnote = new SectionCount(buffer, offset + 200, 4, "# automap notes",
AutomapNote.class);
list.add(count_automapnote);
offset_protrap = new SectionOffset(buffer, offset + 204, "Projectile traps offset",
ProTrap.class);
list.add(offset_protrap);
count_protrap = new SectionCount(buffer, offset + 208, 4, "# projectile traps",
ProTrap.class);
list.add(count_protrap);
list.add(new ResourceRef(buffer, offset + 212, "Rest movie (day)", "MVE"));
list.add(new ResourceRef(buffer, offset + 220, "Rest movie (night)", "MVE"));
list.add(new Unknown(buffer, offset + 228, 56));
}
else if (ResourceFactory.getGameID() == ResourceFactory.ID_ICEWIND2) {
offset_automapnote = new SectionOffset(buffer, offset + 196, "Automap notes offset",
AutomapNote.class);
list.add(offset_automapnote);
count_automapnote = new SectionCount(buffer, offset + 200, 4, "# automap notes",
AutomapNote.class);
list.add(count_automapnote);
list.add(new Unknown(buffer, offset + 204, 80));
}
else
list.add(new Unknown(buffer, offset + 196, 88));
offset = offset_actors.getValue();
for (int i = 0; i < count_actors.getValue(); i++) {
Actor actor = new Actor(this, buffer, offset, i + 1);
offset = actor.getEndOffset();
list.add(actor);
}
offset = offset_itepoints.getValue();
for (int i = 0; i < count_itepoints.getValue(); i++) {
ITEPoint ite = new ITEPoint(this, buffer, offset);
offset = ite.getEndOffset();
list.add(ite);
}
offset = offset_spoints.getValue();
for (int i = 0; i < count_spoints.getValue(); i++) {
SpawnPoint sp = new SpawnPoint(this, buffer, offset);
offset = sp.getEndOffset();
list.add(sp);
}
offset = offset_entrances.getValue();
for (int i = 0; i < count_entrances.getValue(); i++) {
Entrance ent = new Entrance(this, buffer, offset);
offset = ent.getEndOffset();
list.add(ent);
}
offset = offset_containers.getValue();
for (int i = 0; i < count_containers.getValue(); i++) {
Container con = new Container(this, buffer, offset, i + 1);
offset = con.getEndOffset();
list.add(con);
}
offset = offset_ambients.getValue();
for (int i = 0; i < count_ambients.getValue(); i++) {
Ambient ambi = new Ambient(this, buffer, offset, i + 1);
offset = ambi.getEndOffset();
list.add(ambi);
}
offset = offset_variables.getValue();
for (int i = 0; i < count_variables.getValue(); i++) {
Variable var = new Variable(this, buffer, offset);
offset = var.getEndOffset();
list.add(var);
}
offset = offset_exploredbitmap.getValue();
if (size_exploredbitmap.getValue() > 0)
list.add(new Unknown(buffer, offset, size_exploredbitmap.getValue(), "Explored bitmap"));
offset = offset_doors.getValue();
for (int i = 0; i < count_doors.getValue(); i++) {
Door door = new Door(this, buffer, offset, i + 1);
offset = door.getEndOffset();
list.add(door);
}
offset = offset_animations.getValue();
for (int i = 0; i < count_animations.getValue(); i++) {
Animation anim = new Animation(this, buffer, offset);
offset = anim.getEndOffset();
list.add(anim);
}
offset = offset_tiledobjects.getValue();
for (int i = 0; i < count_tiledobjects.getValue(); i++) {
TiledObject tile = new TiledObject(this, buffer, offset);
offset = tile.getEndOffset();
list.add(tile);
}
if (offset_automapnote != null) { // Torment, BG2
offset = offset_automapnote.getValue();
if (ResourceFactory.getGameID() == ResourceFactory.ID_TORMENT) {
for (int i = 0; i < count_automapnote.getValue(); i++) {
AutomapNotePST note = new AutomapNotePST(this, buffer, offset);
offset = note.getEndOffset();
list.add(note);
}
}
else {
for (int i = 0; i < count_automapnote.getValue(); i++) {
AutomapNote note = new AutomapNote(this, buffer, offset);
offset = note.getEndOffset();
list.add(note);
}
}
}
if (offset_protrap != null) { // BG2
offset = offset_protrap.getValue();
for (int i = 0; i < count_protrap.getValue(); i++) {
ProTrap trap = new ProTrap(this, buffer, offset);
offset = trap.getEndOffset();
list.add(trap);
}
}
offset = offset_items.getValue();
for (int i = 0; i < list.size(); i++) {
Object o = list.get(i);
if (o instanceof Container)
((Container)o).readItems(buffer, offset);
}
offset = offset_vertices.getValue();
for (int i = 0; i < list.size(); i++) {
Object o = list.get(i);
if (o instanceof HasVertices)
((HasVertices)o).readVertices(buffer, offset);
}
if (offset_songs.getValue() > 0)
list.add(new Song(this, buffer, offset_songs.getValue()));
if (offset_rest.getValue() > 0)
list.add(new RestSpawn(this, buffer, offset_rest.getValue()));
int endoffset = offset;
for (int i = 0; i < list.size(); i++) {
StructEntry entry = list.get(i);
if (entry.getOffset() + entry.getSize() > endoffset)
endoffset = entry.getOffset() + entry.getSize();
}
return endoffset;
}
|
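The ARE reader fix above adds ResourceFactory.ID_BGEE to the BG2-family branch, so Enhanced Edition areas are parsed with the BG2 header trailer (automap notes, projectile traps, rest movies) instead of falling through to the generic 88-byte unknown block. A sketch of the same dispatch expressed as a set-membership test; the enum and its constants are hypothetical stand-ins for ResourceFactory's integer IDs:

import java.util.EnumSet;

public class AreaTrailerDispatch {
    // Hypothetical mirror of the ResourceFactory.ID_* constants.
    enum GameId { TORMENT, BG2, BG2TOB, TUTU, BGEE, ICEWIND2, OTHER }

    // Games whose ARE trailer uses the BG2 layout. Supporting a new variant
    // is one edit here instead of another '||' clause at the call site.
    private static final EnumSet<GameId> BG2_FAMILY =
            EnumSet.of(GameId.BG2, GameId.BG2TOB, GameId.TUTU, GameId.BGEE);

    static boolean usesBg2Trailer(GameId id) {
        return BG2_FAMILY.contains(id);
    }

    public static void main(String[] args) {
        System.out.println(usesBg2Trailer(GameId.BGEE));     // true
        System.out.println(usesBg2Trailer(GameId.ICEWIND2)); // false
    }
}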
diff --git a/tests/org.jboss.tools.ws.ui.bot.test/src/org/jboss/tools/ws/ui/bot/test/webservice/eap/EAPFromJavaTest.java b/tests/org.jboss.tools.ws.ui.bot.test/src/org/jboss/tools/ws/ui/bot/test/webservice/eap/EAPFromJavaTest.java
index 18113d36f..1eaa9b45b 100644
--- a/tests/org.jboss.tools.ws.ui.bot.test/src/org/jboss/tools/ws/ui/bot/test/webservice/eap/EAPFromJavaTest.java
+++ b/tests/org.jboss.tools.ws.ui.bot.test/src/org/jboss/tools/ws/ui/bot/test/webservice/eap/EAPFromJavaTest.java
@@ -1,164 +1,165 @@
/*******************************************************************************
* Copyright (c) 2011 Red Hat, Inc.
* Distributed under license by Red Hat, Inc. All rights reserved.
* This program is made available under the terms of the
* Eclipse Public License v1.0 which accompanies this distribution,
* and is available at http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* Red Hat, Inc. - initial API and implementation
******************************************************************************/
package org.jboss.tools.ws.ui.bot.test.webservice.eap;
import java.util.logging.Level;
import org.eclipse.core.resources.IFile;
import org.eclipse.core.resources.IProject;
import org.eclipse.core.resources.IWorkspaceRoot;
import org.eclipse.core.resources.ResourcesPlugin;
import org.eclipse.core.runtime.CoreException;
import org.eclipse.core.runtime.NullProgressMonitor;
import org.eclipse.swtbot.eclipse.finder.widgets.SWTBotEclipseEditor;
import org.jboss.tools.ui.bot.ext.config.Annotations.Require;
import org.jboss.tools.ui.bot.ext.config.Annotations.Server;
import org.jboss.tools.ws.ui.bot.test.WSAllBotTests;
import org.jboss.tools.ws.ui.bot.test.uiutils.actions.NewFileWizardAction;
import org.jboss.tools.ws.ui.bot.test.uiutils.wizards.Wizard;
import org.jboss.tools.ws.ui.bot.test.uiutils.wizards.WsWizardBase.Slider_Level;
import org.jboss.tools.ws.ui.bot.test.webservice.WebServiceTestBase;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.junit.runners.Suite.SuiteClasses;
/**
 * Test operates on creating a non-trivial EAP project from a Java class
* @author jlukas
*
*/
@SuiteClasses({ WSAllBotTests.class, EAPCompAllTests.class })
@Require(perspective="Java EE",
server=@Server)
// (type=ServerType.EAP,
// version = "5.1", operator = ">="))
public class EAPFromJavaTest extends WebServiceTestBase {
private static boolean servicePassed = false;
@Before
@Override
public void setup() {
if (!projectExists(getWsProjectName())) {
projectHelper.createProject(getWsProjectName());
}
if (!projectExists(getWsClientProjectName())) {
projectHelper.createProject(getWsClientProjectName());
}
}
@After
@Override
public void cleanup() {
//do nothing here
//we don't want to undeploy our app yet
}
@AfterClass
public static void removeAllProjects() {
servers.removeAllProjectsFromServer();
}
@Override
protected String getWsProjectName() {
return "TestWSProject";
}
@Override
protected String getEarProjectName() {
return getWsProjectName() + "EAR";
}
protected String getWsClientProjectName() {
return "TestWSClientProject";
}
@Override
protected String getWsPackage() {
return "test.ws";
}
@Override
protected String getWsName() {
return "Echo";
}
@Override
protected Slider_Level getLevel() {
return Slider_Level.DEPLOY;
}
@Test
public void testService() {
//create a class representing some complex type
SWTBotEclipseEditor st = projectHelper.createClass(getWsProjectName(), "test", "Person").toTextEditor();
st.selectRange(0, 0, st.getText().length());
st.setText(resourceHelper.readStream(EAPFromJavaTest.class.getResourceAsStream("/resources/jbossws/Person.java.ws")));
st.saveAndClose();
//refresh workspace - workaround??? for JBIDE-6731
try {
ResourcesPlugin.getWorkspace().getRoot().refreshLocal(IWorkspaceRoot.DEPTH_INFINITE, new NullProgressMonitor());
} catch (CoreException e) {
LOGGER.log(Level.WARNING, e.getMessage(), e);
}
bot.sleep(TIME_500MS);
bottomUpJbossWebService(EAPFromJavaTest.class.getResourceAsStream("/resources/jbossws/Echo.java.ws"));
IProject project = ResourcesPlugin.getWorkspace().getRoot().getProject(getWsProjectName());
IFile f = project.getFile("WebContent/WEB-INF/web.xml");
String content = resourceHelper.readFile(f);
Assert.assertNotNull(content);
Assert.assertTrue(content.contains("<servlet-class>test.ws.Echo</servlet-class>"));
Assert.assertTrue(content.contains("<url-pattern>/Echo</url-pattern>"));
deploymentHelper.runProject(getEarProjectName());
deploymentHelper.assertServiceDeployed(deploymentHelper.getWSDLUrl(getWsProjectName(), getWsName()), 10000);
servicePassed = true;
}
@Test
public void testClient() {
Assert.assertTrue("service must exist", servicePassed);
clientHelper.createClient(deploymentHelper.getWSDLUrl(getWsProjectName(), getWsName()),
getWsClientProjectName(), getLevel(), "");
IProject p = ResourcesPlugin.getWorkspace().getRoot().getProject(getWsClientProjectName());
String pkg = "test/ws";
String cls = "src/" + pkg + "/EchoService.java";
Assert.assertTrue(p.getFile(cls).exists());
cls = "src/" + pkg + "/clientsample/ClientSample.java";
Assert.assertTrue(p.getFile(cls).exists());
//create JSP
new NewFileWizardAction().run().selectTemplate("Web", "JSP File").next();
Wizard w = new Wizard();
w.bot().textWithLabel("File name:").setText("index");
w.bot().textWithLabel("Enter or select the parent folder:").setText(getWsClientProjectName() + "/WebContent");
w.finish();
bot.sleep(TIME_5S);
/**
* Workaround for 4.x branch
*
* bot.activeShell().bot().button("Skip").click();
* bot.sleep(TIME_5S);
*/
SWTBotEclipseEditor st = bot.editorByTitle("index.jsp").toTextEditor();
st.selectRange(0, 0, st.getText().length());
st.setText(resourceHelper.readStream(EAPFromJavaTest.class.getResourceAsStream("/resources/jbossws/index.jsp.ws")));
st.saveAndClose();
bot.sleep(TIME_1S*2);
deploymentHelper.runProject(getWsClientProjectName());
+ servers.cleanServer(configuredState.getServer().name);
String pageContent = deploymentHelper.getPage("http://localhost:8080/" + getWsClientProjectName() + "/index.jsp", 15000);
LOGGER.info(pageContent);
Assert.assertTrue(pageContent.contains("BartSimpson(age: 12)"));
Assert.assertTrue(pageContent.contains("Homer(age: 44)"));
}
}
| true | true | public void testClient() {
Assert.assertTrue("service must exist", servicePassed);
clientHelper.createClient(deploymentHelper.getWSDLUrl(getWsProjectName(), getWsName()),
getWsClientProjectName(), getLevel(), "");
IProject p = ResourcesPlugin.getWorkspace().getRoot().getProject(getWsClientProjectName());
String pkg = "test/ws";
String cls = "src/" + pkg + "/EchoService.java";
Assert.assertTrue(p.getFile(cls).exists());
cls = "src/" + pkg + "/clientsample/ClientSample.java";
Assert.assertTrue(p.getFile(cls).exists());
//create JSP
new NewFileWizardAction().run().selectTemplate("Web", "JSP File").next();
Wizard w = new Wizard();
w.bot().textWithLabel("File name:").setText("index");
w.bot().textWithLabel("Enter or select the parent folder:").setText(getWsClientProjectName() + "/WebContent");
w.finish();
bot.sleep(TIME_5S);
/**
* Workaround for 4.x branch
*
* bot.activeShell().bot().button("Skip").click();
* bot.sleep(TIME_5S);
*/
SWTBotEclipseEditor st = bot.editorByTitle("index.jsp").toTextEditor();
st.selectRange(0, 0, st.getText().length());
st.setText(resourceHelper.readStream(EAPFromJavaTest.class.getResourceAsStream("/resources/jbossws/index.jsp.ws")));
st.saveAndClose();
bot.sleep(TIME_1S*2);
deploymentHelper.runProject(getWsClientProjectName());
String pageContent = deploymentHelper.getPage("http://localhost:8080/" + getWsClientProjectName() + "/index.jsp", 15000);
LOGGER.info(pageContent);
Assert.assertTrue(pageContent.contains("BartSimpson(age: 12)"));
Assert.assertTrue(pageContent.contains("Homer(age: 44)"));
}
| public void testClient() {
Assert.assertTrue("service must exist", servicePassed);
clientHelper.createClient(deploymentHelper.getWSDLUrl(getWsProjectName(), getWsName()),
getWsClientProjectName(), getLevel(), "");
IProject p = ResourcesPlugin.getWorkspace().getRoot().getProject(getWsClientProjectName());
String pkg = "test/ws";
String cls = "src/" + pkg + "/EchoService.java";
Assert.assertTrue(p.getFile(cls).exists());
cls = "src/" + pkg + "/clientsample/ClientSample.java";
Assert.assertTrue(p.getFile(cls).exists());
//create JSP
new NewFileWizardAction().run().selectTemplate("Web", "JSP File").next();
Wizard w = new Wizard();
w.bot().textWithLabel("File name:").setText("index");
w.bot().textWithLabel("Enter or select the parent folder:").setText(getWsClientProjectName() + "/WebContent");
w.finish();
bot.sleep(TIME_5S);
/**
* Workaround for 4.x branch
*
* bot.activeShell().bot().button("Skip").click();
* bot.sleep(TIME_5S);
*/
SWTBotEclipseEditor st = bot.editorByTitle("index.jsp").toTextEditor();
st.selectRange(0, 0, st.getText().length());
st.setText(resourceHelper.readStream(EAPFromJavaTest.class.getResourceAsStream("/resources/jbossws/index.jsp.ws")));
st.saveAndClose();
bot.sleep(TIME_1S*2);
deploymentHelper.runProject(getWsClientProjectName());
servers.cleanServer(configuredState.getServer().name);
String pageContent = deploymentHelper.getPage("http://localhost:8080/" + getWsClientProjectName() + "/index.jsp", 15000);
LOGGER.info(pageContent);
Assert.assertTrue(pageContent.contains("BartSimpson(age: 12)"));
Assert.assertTrue(pageContent.contains("Homer(age: 44)"));
}
|
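The one-line fix above cleans the server before polling the freshly deployed client page, so the assertions cannot be satisfied by a stale deployment. The surrounding test relies on deploymentHelper.getPage(url, timeoutMs), which presumably polls until the page answers; a minimal sketch of such a poller (illustrative only, not the real helper's contract):

import java.io.IOException;
import java.io.InputStream;
import java.net.HttpURLConnection;
import java.net.URL;
import java.nio.charset.StandardCharsets;

public final class PagePoller {
    // Polls a URL until it answers HTTP 200 or the timeout elapses.
    public static String getPage(String url, long timeoutMs)
            throws IOException, InterruptedException {
        long deadline = System.currentTimeMillis() + timeoutMs;
        IOException last = null;
        while (System.currentTimeMillis() < deadline) {
            try {
                HttpURLConnection con =
                        (HttpURLConnection) new URL(url).openConnection();
                if (con.getResponseCode() == 200) {
                    try (InputStream in = con.getInputStream()) {
                        return new String(in.readAllBytes(), StandardCharsets.UTF_8);
                    }
                }
            } catch (IOException e) {
                last = e; // container still starting; retry below
            }
            Thread.sleep(500);
        }
        if (last != null) throw last;
        throw new IOException("Timed out waiting for " + url);
    }
}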
diff --git a/src/test/java/org/surfnet/cruncher/resource/CruncherResourceTest.java b/src/test/java/org/surfnet/cruncher/resource/CruncherResourceTest.java
index ace2c81..15bedbe 100644
--- a/src/test/java/org/surfnet/cruncher/resource/CruncherResourceTest.java
+++ b/src/test/java/org/surfnet/cruncher/resource/CruncherResourceTest.java
@@ -1,169 +1,169 @@
/*
* Copyright 2013 SURFnet bv, The Netherlands
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.surfnet.cruncher.resource;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.fail;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.List;
import javax.inject.Inject;
import javax.servlet.http.HttpServletRequest;
import javax.ws.rs.core.Response;
import org.joda.time.LocalDate;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
import org.surfnet.cruncher.config.SpringConfiguration;
import org.surfnet.cruncher.model.LoginData;
import org.surfnet.cruncher.model.SpStatistic;
@SuppressWarnings("unchecked")
@RunWith(SpringJUnit4ClassRunner.class)
@ContextConfiguration(classes = SpringConfiguration.class)
public class CruncherResourceTest {
@Inject
private CruncherResource cruncherResource;
@Inject
private JdbcTemplate jdbcTemplate;
private HttpServletRequest request = null; //currently never really used
@Test
public void getUniqueLogins() {
LocalDate start = new LocalDate(0L);
LocalDate end = new LocalDate(System.currentTimeMillis());
Response response = cruncherResource.getUniqueLogins(request, start.toDate().getTime(), end.toDate().getTime(), "idp1", "sp1");
List<LoginData> result = (List<LoginData>) response.getEntity();
assertNotNull(result);
- assertEquals(13, result.size());
+ assertEquals(12, result.size());
response = cruncherResource.getUniqueLogins(request, start.toDate().getTime(), end.toDate().getTime(), "idp1", "unknown");
result = (List<LoginData>) response.getEntity();
assertNotNull(result);
assertEquals(0, result.size());
LocalDate startDate = new LocalDate(2013, 1, 1);
LocalDate endDate = new LocalDate(2013, 1, 4);
response = cruncherResource.getUniqueLogins(request, startDate.toDate().getTime(), endDate.toDate().getTime(), "idp1", null);
result = (List<LoginData>) response.getEntity();
assertNotNull(result);
assertEquals(8, result.size());
LoginData first = result.get(0);
assertEquals(20, first.getTotal());
assertEquals("idp1", first.getIdpEntityId());
}
@Test
public void getLogins() {
LocalDate start = new LocalDate(2013, 1, 1);
LocalDate end = new LocalDate(2013, 1, 12);
Response response = cruncherResource.getLoginsPerInterval(request, start.toDate().getTime(), end.toDate().getTime(), "idp1", "sp1");
List<LoginData> result = (List<LoginData>) response.getEntity();
assertNotNull(result);
assertEquals(1, result.size());
LoginData data = result.get(0);
checkSp1Entry(data);
}
@Test
public void getMultipleLogins() {
LocalDate start = new LocalDate(2013, 1, 1);
LocalDate end = new LocalDate(2013, 1, 12);
Response response = cruncherResource.getLoginsPerInterval(request, start.toDate().getTime(), end.toDate().getTime(), "idp1", null);
List<LoginData> result = (List<LoginData>) response.getEntity();
assertNotNull(result);
assertEquals(2, result.size());
LoginData first = result.get(0);
LoginData second = result.get(1);
if (first.getSpEntityId().equals("sp1")) {
checkSp1Entry(first);
} else {
checkSp1Entry(second);
}
}
private void checkSp1Entry(LoginData data) {
assertEquals(240, data.getTotal());
assertEquals(12, data.getData().size());
assertEquals(20, (int) data.getData().get(0));
assertEquals(20, (int) data.getData().get(6));
assertEquals(20, (int) data.getData().get(11));
}
@Test
public void testIllegalArguments() {
try {
cruncherResource.getLoginsPerInterval(request, 0L, 0L, null, null);
fail("Should have received an illegal argument exception");
} catch (IllegalArgumentException iae) {
// expected
}
}
@Test
public void testResponseWithZeros() {
LocalDate start = new LocalDate(2013, 1, 10);
LocalDate end = new LocalDate(2013, 1, 20);
Response response = cruncherResource.getLoginsPerInterval(request, start.toDate().getTime(), end.toDate().getTime(), "idp1", "sp1");
List<LoginData> result = (List<LoginData>) response.getEntity();
assertNotNull(result);
assertEquals(1, result.size());
LoginData loginData = result.get(0);
assertEquals(11, loginData.getData().size());
assertEquals(20, (int)loginData.getData().get(2));
assertEquals(0, (int)loginData.getData().get(3));
assertEquals(0, (int)loginData.getData().get(4));
assertEquals(0, (int)loginData.getData().get(10));
}
@Test
public void getActiveServices() {
Response response = cruncherResource.getRecentLoginsForUser(request, "user_1", "idp2");
List<SpStatistic> result = (List<SpStatistic>) response.getEntity();
assertNotNull(result);
assertEquals(2, result.size());
SpStatistic currentStat = result.get(0);
if (currentStat.getSpEntityId().equals("sp2")) {
checkStatistics(result.get(0));
} else {
checkStatistics(result.get(1));
}
}
private void checkStatistics(SpStatistic spStatistic) {
SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
java.util.Date entryDate = null;
try {
entryDate = sdf.parse("2012-04-19 11:48:41");
} catch (ParseException e) {
e.printStackTrace();
}
assertEquals(entryDate.getTime(), spStatistic.getEntryTime());
}
}
| true | true | public void getUniqueLogins() {
LocalDate start = new LocalDate(0L);
LocalDate end = new LocalDate(System.currentTimeMillis());
Response response = cruncherResource.getUniqueLogins(request, start.toDate().getTime(), end.toDate().getTime(), "idp1", "sp1");
List<LoginData> result = (List<LoginData>) response.getEntity();
assertNotNull(result);
assertEquals(13, result.size());
response = cruncherResource.getUniqueLogins(request, start.toDate().getTime(), end.toDate().getTime(), "idp1", "unknown");
result = (List<LoginData>) response.getEntity();
assertNotNull(result);
assertEquals(0, result.size());
LocalDate startDate = new LocalDate(2013, 1, 1);
LocalDate endDate = new LocalDate(2013, 1, 4);
response = cruncherResource.getUniqueLogins(request, startDate.toDate().getTime(), endDate.toDate().getTime(), "idp1", null);
result = (List<LoginData>) response.getEntity();
assertNotNull(result);
assertEquals(8, result.size());
LoginData first = result.get(0);
assertEquals(20, first.getTotal());
assertEquals("idp1", first.getIdpEntityId());
}
| public void getUniqueLogins() {
LocalDate start = new LocalDate(0L);
LocalDate end = new LocalDate(System.currentTimeMillis());
Response response = cruncherResource.getUniqueLogins(request, start.toDate().getTime(), end.toDate().getTime(), "idp1", "sp1");
List<LoginData> result = (List<LoginData>) response.getEntity();
assertNotNull(result);
assertEquals(12, result.size());
response = cruncherResource.getUniqueLogins(request, start.toDate().getTime(), end.toDate().getTime(), "idp1", "unknown");
result = (List<LoginData>) response.getEntity();
assertNotNull(result);
assertEquals(0, result.size());
LocalDate startDate = new LocalDate(2013, 1, 1);
LocalDate endDate = new LocalDate(2013, 1, 4);
response = cruncherResource.getUniqueLogins(request, startDate.toDate().getTime(), endDate.toDate().getTime(), "idp1", null);
result = (List<LoginData>) response.getEntity();
assertNotNull(result);
assertEquals(8, result.size());
LoginData first = result.get(0);
assertEquals(20, first.getTotal());
assertEquals("idp1", first.getIdpEntityId());
}
|
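The CruncherResourceTest change above only adjusts an expected row count (13 to 12), presumably tracking a correction in the seed data rather than in the query logic. The query window itself is built from epoch millis via Joda-Time; a small sketch of that conversion (output depends on the default time zone):

import org.joda.time.LocalDate;

public class QueryWindowSketch {
    public static void main(String[] args) {
        LocalDate start = new LocalDate(0L);                       // 1970-01-01
        LocalDate end = new LocalDate(System.currentTimeMillis()); // today
        long startMillis = start.toDate().getTime();               // local midnight
        System.out.println(start + " .. " + end + " (start=" + startMillis + ")");
    }
}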
diff --git a/lib/src/main/java/com/github/fhirschmann/clozegen/lib/register/RegisterFactory.java b/lib/src/main/java/com/github/fhirschmann/clozegen/lib/register/RegisterFactory.java
index 7fe1073..f4cc029 100644
--- a/lib/src/main/java/com/github/fhirschmann/clozegen/lib/register/RegisterFactory.java
+++ b/lib/src/main/java/com/github/fhirschmann/clozegen/lib/register/RegisterFactory.java
@@ -1,143 +1,144 @@
/*
* Copyright (C) 2012 Fabian Hirschmann <[email protected]>
*
* This program is free software; you can redistribute it and/or
* modify it under the terms of the GNU General Public License
* as published by the Free Software Foundation; either version 2
* of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
*/
package com.github.fhirschmann.clozegen.lib.register;
import com.github.fhirschmann.clozegen.lib.components.GapAnnotator;
import com.github.fhirschmann.clozegen.lib.adapters.CollocationAdapter;
import com.google.common.collect.Sets;
import de.tudarmstadt.ukp.dkpro.core.io.pdf.PdfReader;
import de.tudarmstadt.ukp.dkpro.core.io.text.TextReader;
import java.net.URL;
import java.util.logging.Level;
import java.util.logging.Logger;
import org.apache.uima.collection.CollectionReader;
import org.apache.uima.resource.ResourceInitializationException;
import static org.uimafit.factory.ExternalResourceFactory.createExternalResourceDescription;
import static org.uimafit.factory.CollectionReaderFactory.createCollectionReader;
import static com.google.common.base.Preconditions.checkNotNull;
import static com.google.common.base.Preconditions.checkArgument;
import de.tudarmstadt.ukp.dkpro.core.api.io.ResourceCollectionReaderBase;
import java.io.File;
import java.net.URI;
import java.util.Arrays;
import org.apache.uima.collection.CollectionReader_ImplBase;
/**
*
* @author Fabian Hirschmann <[email protected]>
*/
public final class RegisterFactory {
public static final Logger LOGGER = Logger.getLogger(RegisterFactory.class.getName());
/** Utility class cannot be called. */
private RegisterFactory() {
}
/**
* Creates a new {@link DescriptionRegister} prefilled with known descriptions
* for gap generation.
*
* @return a new {@link DescriptionRegister}
* @throws ResourceInitializationException on errors
*/
public static DescriptionRegister createDefaultDescriptionRegister()
throws ResourceInitializationException {
DescriptionRegister register = new DescriptionRegister();
DescriptionRegisterEntry entry = new DescriptionRegisterEntry("prepositions",
GapAnnotator.class,
GapAnnotator.ADAPTER_KEY,
createExternalResourceDescription(
CollocationAdapter.class,
- CollocationAdapter.PARAM_PATH, "frequencies/en/prepositions"));
+ CollocationAdapter.PARAM_PATH,
+ "frequencies/en/prepositions/trigrams.txt"));
entry.setName("Preposition Gap Generator");
entry.setSupportedLanguages(Sets.newHashSet("en"));
register.add(entry);
return register;
}
/**
* Convenience method for creating {@link CollectionReader} patterns which
* should work on only one input file.
*
* @param file the file in question
* @return a new pattern
*/
public static String[] createPatterns(final File file) {
String[] patterns = new String[] {String.format("[+]%s",
checkNotNull(file.getName()))};
return patterns;
}
/**
* Creates a new standard {@link CollectionReader} based upon the input
* parameters.
*
* @param clazz the collection reader class
* @param file the input file
* @param languageCode the language code
* @return a new collection reader
* @throws ResourceInitializationException on errors during initialization
*/
public static CollectionReader createDefaultReader(
final Class<? extends CollectionReader_ImplBase> clazz,
final File file, final String languageCode)
throws ResourceInitializationException {
return createCollectionReader(
clazz,
ResourceCollectionReaderBase.PARAM_LANGUAGE, languageCode,
ResourceCollectionReaderBase.PARAM_PATH, file.getParent(),
ResourceCollectionReaderBase.PARAM_PATTERNS, createPatterns(file));
}
/**
* Creates a new standard {@link ReaderRegisterEntry}.
*
* @param clazz the collection reader class
* @return a new register entry
*/
public static ReaderRegisterEntry createDefaultReaderRegisterEntry(
final Class<? extends CollectionReader_ImplBase> clazz) {
return new ReaderRegisterEntry() {
@Override
public CollectionReader get(final File file, final String languageCode) {
try {
return createDefaultReader(clazz, file, languageCode);
} catch (ResourceInitializationException ex) {
LOGGER.log(Level.SEVERE, null, ex);
return null;
}
}
};
}
public static ReaderRegister createDefaultReaderRegister() {
ReaderRegister register = new ReaderRegister();
ReaderRegisterEntry txt = createDefaultReaderRegisterEntry(TextReader.class);
register.put("txt", txt);
register.put("text", txt);
ReaderRegisterEntry pdf = createDefaultReaderRegisterEntry(PdfReader.class);
register.put("pdf", pdf);
return register;
}
}
| true | true | public static DescriptionRegister createDefaultDescriptionRegister()
throws ResourceInitializationException {
DescriptionRegister register = new DescriptionRegister();
DescriptionRegisterEntry entry = new DescriptionRegisterEntry("prepositions",
GapAnnotator.class,
GapAnnotator.ADAPTER_KEY,
createExternalResourceDescription(
CollocationAdapter.class,
CollocationAdapter.PARAM_PATH, "frequencies/en/prepositions"));
entry.setName("Preposition Gap Generator");
entry.setSupportedLanguages(Sets.newHashSet("en"));
register.add(entry);
return register;
}
| public static DescriptionRegister createDefaultDescriptionRegister()
throws ResourceInitializationException {
DescriptionRegister register = new DescriptionRegister();
DescriptionRegisterEntry entry = new DescriptionRegisterEntry("prepositions",
GapAnnotator.class,
GapAnnotator.ADAPTER_KEY,
createExternalResourceDescription(
CollocationAdapter.class,
CollocationAdapter.PARAM_PATH,
"frequencies/en/prepositions/trigrams.txt"));
entry.setName("Preposition Gap Generator");
entry.setSupportedLanguages(Sets.newHashSet("en"));
register.add(entry);
return register;
}
|
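The RegisterFactory fix above narrows CollocationAdapter's PARAM_PATH from the frequencies directory down to the concrete trigrams.txt file, which is what a single-file frequency loader needs. A generic sketch of validating such a configuration value up front, so a directory path fails at startup instead of surfacing as an opaque parse error later (names are illustrative):

import java.io.File;
import java.io.IOException;

public class ResourcePathCheck {
    // Rejects directory paths early so misconfiguration is caught at startup.
    static File requireFile(String path) {
        File f = new File(path);
        if (!f.isFile()) {
            throw new IllegalArgumentException("Expected a data file, got: " + path);
        }
        return f;
    }

    public static void main(String[] args) throws IOException {
        File tmp = File.createTempFile("trigrams", ".txt");
        tmp.deleteOnExit();
        System.out.println(requireFile(tmp.getPath())); // passes: a real file
        try {
            requireFile(tmp.getParent());               // fails: a directory
        } catch (IllegalArgumentException expected) {
            System.out.println(expected.getMessage());
        }
    }
}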
diff --git a/src/me/neatmonster/spacemodule/management/VersionsManager.java b/src/me/neatmonster/spacemodule/management/VersionsManager.java
index 19e8f11..3e08647 100644
--- a/src/me/neatmonster/spacemodule/management/VersionsManager.java
+++ b/src/me/neatmonster/spacemodule/management/VersionsManager.java
@@ -1,89 +1,89 @@
/*
* This file is part of SpaceModule (http://spacebukkit.xereo.net/).
*
* SpaceModule is free software: you can redistribute it and/or modify it under the terms of the
* Attribution-NonCommercial-ShareAlike Unported (CC BY-NC-SA) license as published by the Creative
* Common organization, either version 3.0 of the license, or (at your option) any later version.
*
* SpaceBukkit is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without
* even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* Attribution-NonCommercial-ShareAlike Unported (CC BY-NC-SA) license for more details.
*
* You should have received a copy of the Attribution-NonCommercial-ShareAlike Unported (CC BY-NC-SA)
* license along with this program. If not, see <http://creativecommons.org/licenses/by-nc-sa/3.0/>.
*/
package me.neatmonster.spacemodule.management;
import java.util.LinkedHashMap;
import me.neatmonster.spacemodule.SpaceModule;
import me.neatmonster.spacemodule.utilities.Console;
import me.neatmonster.spacemodule.utilities.Utilities;
import org.bukkit.util.config.Configuration;
@SuppressWarnings("deprecation")
public class VersionsManager {
public final String PROJECT_NAME;
public String ARTIFACT_NAME;
public int RECOMMENDED;
public int DEVELOPMENT;
private final LinkedHashMap<Integer, String> builds = new LinkedHashMap<Integer, String>();
public VersionsManager(final String projectName) {
PROJECT_NAME = projectName;
}
public int match(final String md5) {
if (builds.containsValue(md5))
for (final int buildNumber : builds.keySet()) {
final String bMD5 = builds.get(buildNumber);
if (bMD5.equalsIgnoreCase(md5))
return buildNumber;
}
return 0;
}
public void setup() {
Console.progress("Checking for updates", 0);
final Configuration database = new Configuration(SpaceModule.DATABASE);
database.load();
final int lastChecked = database.getInt(PROJECT_NAME + ".LastChecked", 0);
final String developmentPage = Utilities.getContent("http://dev.drdanick.com/jenkins/job/" + PROJECT_NAME
+ "/lastSuccessfulBuild/buildNumber/");
DEVELOPMENT = Integer.parseInt(developmentPage);
Console.progress("Checking for updates", (int) Math.round(1D / (DEVELOPMENT - lastChecked + 3D) * 100D));
final String recommendedPage = Utilities.getContent("http://dev.drdanick.com/jenkins/job/" + PROJECT_NAME
+ "/Recommended/buildNumber/");
RECOMMENDED = Integer.parseInt(recommendedPage);
Console.progress("Checking for updates", (int) Math.round(2D / (DEVELOPMENT - lastChecked + 3D) * 100D));
final String artifactPage = Utilities.getContent("http://dev.drdanick.com/jenkins/job/" + PROJECT_NAME + "/"
+ (SpaceModule.getInstance().recommended ? "Recommended" : "lastSuccessfulBuild") + "/");
int beginIndex = artifactPage.indexOf(PROJECT_NAME.toLowerCase());
final int endIndex = artifactPage.substring(beginIndex).indexOf(".jar") + beginIndex + 4;
ARTIFACT_NAME = artifactPage.substring(beginIndex, endIndex);
Console.progress("Checking for updates", (int) Math.round(3D / (DEVELOPMENT - lastChecked + 3D) * 100D));
if (lastChecked > 0)
for (int buildNumber = 1; buildNumber < lastChecked + 1; buildNumber++) {
final String md5 = database.getString(PROJECT_NAME + ".Build" + buildNumber, null);
builds.put(buildNumber, md5);
}
for (int buildNumber = lastChecked + 1; buildNumber < DEVELOPMENT + 1; buildNumber++) {
final String buildPage = Utilities.getContent("http://dev.drdanick.com/jenkins/job/" + PROJECT_NAME + "/"
+ buildNumber + "/artifact/target/" + ARTIFACT_NAME + "/*fingerprint*/");
if (buildPage != null) {
beginIndex = buildPage.indexOf("<div class=\"md5sum\">MD5: ") + 25;
final String md5 = buildPage.substring(beginIndex, beginIndex + 32);
builds.put(buildNumber, md5);
database.setProperty(PROJECT_NAME + ".Build" + buildNumber, md5);
}
Console.progress("Checking for updates",
- (int) Math.round(((double) buildNumber + 3) / (DEVELOPMENT - lastChecked + 3D) * 100D));
+ (int) Math.round((buildNumber - lastChecked + 3D) / (DEVELOPMENT - lastChecked + 3D) * 100D));
}
Console.newLine();
database.setProperty(PROJECT_NAME + ".LastChecked", DEVELOPMENT);
database.save();
}
}
| true | true | public void setup() {
Console.progress("Checking for updates", 0);
final Configuration database = new Configuration(SpaceModule.DATABASE);
database.load();
final int lastChecked = database.getInt(PROJECT_NAME + ".LastChecked", 0);
final String developmentPage = Utilities.getContent("http://dev.drdanick.com/jenkins/job/" + PROJECT_NAME
+ "/lastSuccessfulBuild/buildNumber/");
DEVELOPMENT = Integer.parseInt(developmentPage);
Console.progress("Checking for updates", (int) Math.round(1D / (DEVELOPMENT - lastChecked + 3D) * 100D));
final String recommendedPage = Utilities.getContent("http://dev.drdanick.com/jenkins/job/" + PROJECT_NAME
+ "/Recommended/buildNumber/");
RECOMMENDED = Integer.parseInt(recommendedPage);
Console.progress("Checking for updates", (int) Math.round(2D / (DEVELOPMENT - lastChecked + 3D) * 100D));
final String artifactPage = Utilities.getContent("http://dev.drdanick.com/jenkins/job/" + PROJECT_NAME + "/"
+ (SpaceModule.getInstance().recommended ? "Recommended" : "lastSuccessfulBuild") + "/");
int beginIndex = artifactPage.indexOf(PROJECT_NAME.toLowerCase());
final int endIndex = artifactPage.substring(beginIndex).indexOf(".jar") + beginIndex + 4;
ARTIFACT_NAME = artifactPage.substring(beginIndex, endIndex);
Console.progress("Checking for updates", (int) Math.round(3D / (DEVELOPMENT - lastChecked + 3D) * 100D));
if (lastChecked > 0)
for (int buildNumber = 1; buildNumber < lastChecked + 1; buildNumber++) {
final String md5 = database.getString(PROJECT_NAME + ".Build" + buildNumber, null);
builds.put(buildNumber, md5);
}
for (int buildNumber = lastChecked + 1; buildNumber < DEVELOPMENT + 1; buildNumber++) {
final String buildPage = Utilities.getContent("http://dev.drdanick.com/jenkins/job/" + PROJECT_NAME + "/"
+ buildNumber + "/artifact/target/" + ARTIFACT_NAME + "/*fingerprint*/");
if (buildPage != null) {
beginIndex = buildPage.indexOf("<div class=\"md5sum\">MD5: ") + 25;
final String md5 = buildPage.substring(beginIndex, beginIndex + 32);
builds.put(buildNumber, md5);
database.setProperty(PROJECT_NAME + ".Build" + buildNumber, md5);
}
Console.progress("Checking for updates",
(int) Math.round(((double) buildNumber + 3) / (DEVELOPMENT - lastChecked + 3D) * 100D));
}
Console.newLine();
database.setProperty(PROJECT_NAME + ".LastChecked", DEVELOPMENT);
database.save();
}
| public void setup() {
Console.progress("Checking for updates", 0);
final Configuration database = new Configuration(SpaceModule.DATABASE);
database.load();
final int lastChecked = database.getInt(PROJECT_NAME + ".LastChecked", 0);
final String developmentPage = Utilities.getContent("http://dev.drdanick.com/jenkins/job/" + PROJECT_NAME
+ "/lastSuccessfulBuild/buildNumber/");
DEVELOPMENT = Integer.parseInt(developmentPage);
Console.progress("Checking for updates", (int) Math.round(1D / (DEVELOPMENT - lastChecked + 3D) * 100D));
final String recommendedPage = Utilities.getContent("http://dev.drdanick.com/jenkins/job/" + PROJECT_NAME
+ "/Recommended/buildNumber/");
RECOMMENDED = Integer.parseInt(recommendedPage);
Console.progress("Checking for updates", (int) Math.round(2D / (DEVELOPMENT - lastChecked + 3D) * 100D));
final String artifactPage = Utilities.getContent("http://dev.drdanick.com/jenkins/job/" + PROJECT_NAME + "/"
+ (SpaceModule.getInstance().recommended ? "Recommended" : "lastSuccessfulBuild") + "/");
int beginIndex = artifactPage.indexOf(PROJECT_NAME.toLowerCase());
final int endIndex = artifactPage.substring(beginIndex).indexOf(".jar") + beginIndex + 4;
ARTIFACT_NAME = artifactPage.substring(beginIndex, endIndex);
Console.progress("Checking for updates", (int) Math.round(3D / (DEVELOPMENT - lastChecked + 3D) * 100D));
if (lastChecked > 0)
for (int buildNumber = 1; buildNumber < lastChecked + 1; buildNumber++) {
final String md5 = database.getString(PROJECT_NAME + ".Build" + buildNumber, null);
builds.put(buildNumber, md5);
}
for (int buildNumber = lastChecked + 1; buildNumber < DEVELOPMENT + 1; buildNumber++) {
final String buildPage = Utilities.getContent("http://dev.drdanick.com/jenkins/job/" + PROJECT_NAME + "/"
+ buildNumber + "/artifact/target/" + ARTIFACT_NAME + "/*fingerprint*/");
if (buildPage != null) {
beginIndex = buildPage.indexOf("<div class=\"md5sum\">MD5: ") + 25;
final String md5 = buildPage.substring(beginIndex, beginIndex + 32);
builds.put(buildNumber, md5);
database.setProperty(PROJECT_NAME + ".Build" + buildNumber, md5);
}
Console.progress("Checking for updates",
(int) Math.round((buildNumber - lastChecked + 3D) / (DEVELOPMENT - lastChecked + 3D) * 100D));
}
Console.newLine();
database.setProperty(PROJECT_NAME + ".LastChecked", DEVELOPMENT);
database.save();
}
|
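The VersionsManager fix above rebases the progress numerator on lastChecked so it matches the denominator (DEVELOPMENT - lastChecked + 3D); the old numerator used the absolute build number and could report progress far beyond 100%. A quick worked check of both formulas (the sample numbers are made up):

public class ProgressCheck {
    public static void main(String[] args) {
        int lastChecked = 100, development = 110, buildNumber = 101;
        double denom = development - lastChecked + 3D;                            // 13.0
        long buggy = Math.round(((double) buildNumber + 3) / denom * 100D);       // 800
        long fixed = Math.round((buildNumber - lastChecked + 3D) / denom * 100D); // 31
        System.out.println("buggy=" + buggy + "% fixed=" + fixed + "%");
    }
}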
diff --git a/src/plugins/Freetalk/WoT/WoTMessageManager.java b/src/plugins/Freetalk/WoT/WoTMessageManager.java
index 57b62ca0..e209994b 100644
--- a/src/plugins/Freetalk/WoT/WoTMessageManager.java
+++ b/src/plugins/Freetalk/WoT/WoTMessageManager.java
@@ -1,357 +1,357 @@
/* This code is part of Freenet. It is distributed under the GNU General
* Public License, version 2 (or at your option any later version). See
* http://www.gnu.org/ for further details of the GPL. */
package plugins.Freetalk.WoT;
import java.util.Date;
import java.util.Iterator;
import java.util.List;
import java.util.Set;
import plugins.Freetalk.Board;
import plugins.Freetalk.FTIdentity;
import plugins.Freetalk.FTOwnIdentity;
import plugins.Freetalk.FetchFailedMarker;
import plugins.Freetalk.Freetalk;
import plugins.Freetalk.IdentityManager;
import plugins.Freetalk.Message;
import plugins.Freetalk.MessageList;
import plugins.Freetalk.MessageManager;
import plugins.Freetalk.MessageURI;
import plugins.Freetalk.Persistent;
import plugins.Freetalk.Message.Attachment;
import plugins.Freetalk.exceptions.NoSuchFetchFailedMarkerException;
import plugins.Freetalk.exceptions.NoSuchMessageException;
import plugins.Freetalk.exceptions.NoSuchMessageListException;
import plugins.Freetalk.tasks.PersistentTaskManager;
import com.db4o.ObjectContainer;
import com.db4o.ObjectSet;
import com.db4o.ext.ExtObjectContainer;
import com.db4o.query.Query;
import freenet.keys.FreenetURI;
import freenet.node.RequestClient;
import freenet.pluginmanager.PluginRespirator;
import freenet.support.CurrentTimeUTC;
import freenet.support.Logger;
public class WoTMessageManager extends MessageManager {
/** One for all requests for WoTMessage*, for fairness. */
final RequestClient mRequestClient;
public WoTMessageManager(ExtObjectContainer myDB, IdentityManager myIdentityManager, Freetalk myFreetalk, PluginRespirator myPluginRespirator) {
super(myDB, myIdentityManager, myFreetalk, myPluginRespirator);
mRequestClient = new RequestClient() {
public boolean persistent() {
return false;
}
public void removeFrom(ObjectContainer container) {
throw new UnsupportedOperationException();
}
        };
}
/**
* For being used in JUnit tests to run without a node.
*/
public WoTMessageManager(Freetalk myFreetalk) {
super(myFreetalk);
mRequestClient = null;
}
/**
* Only for being used by the MessageManager itself and by unit tests.
*/
protected synchronized void clearExpiredFetchFailedMarkers() {
super.clearExpiredFetchFailedMarkers();
}
public WoTOwnMessage postMessage(MessageURI myParentThreadURI, Message myParentMessage, Set<Board> myBoards, Board myReplyToBoard,
FTOwnIdentity myAuthor, String myTitle, Date myDate, String myText, List<Attachment> myAttachments) throws Exception {
WoTOwnMessage m;
if(myParentThreadURI != null && !(myParentThreadURI instanceof WoTMessageURI))
throw new IllegalArgumentException("Parent thread URI is no WoTMessageURI: " + myParentThreadURI);
Date date = myDate!=null ? myDate : CurrentTimeUTC.get();
m = WoTOwnMessage.construct((WoTMessageURI)myParentThreadURI, myParentMessage, myBoards, myReplyToBoard, myAuthor, myTitle, date, myText, myAttachments);
m.initializeTransient(mFreetalk);
synchronized(this) {
m.storeAndCommit();
}
if(mFreetalk != null) {
PersistentTaskManager taskManager = mFreetalk.getTaskManager();
if(taskManager != null)
taskManager.onOwnMessagePosted(m);
}
        /* We do not add the message to the boards it is posted to because the user should only see the message once it has been downloaded
         * successfully. This helps the user to spot problems: if they do not see their own messages, they will hopefully report a bug. */
return m;
}
@Override
public synchronized void onMessageListInsertFailed(FreenetURI uri,boolean collision) throws NoSuchMessageListException {
synchronized(db.lock()) {
try {
WoTOwnMessageList list = (WoTOwnMessageList)getOwnMessageList(MessageList.getIDFromURI(uri));
list.cancelInsert();
if(collision)
list.incrementInsertIndex();
db.commit(); Logger.debug(this, "COMMITED.");
}
catch(RuntimeException e) {
Persistent.checkedRollback(db, this, e);
}
}
}
public synchronized void onMessageListFetchFailed(FTIdentity author, FreenetURI uri, FetchFailedMarker.Reason reason) {
WoTMessageList ghostList = new WoTMessageList(author, uri);
ghostList.initializeTransient(mFreetalk);
MessageList.MessageListFetchFailedMarker marker;
try {
getMessageList(ghostList.getID());
Logger.error(this, "Download failed of a MessageList which we already have: " + ghostList.getURI());
return;
}
catch(NoSuchMessageListException e1) {
try {
marker = getMessageListFetchFailedMarker(ghostList.getID());
} catch(NoSuchFetchFailedMarkerException e) {
marker = null;
}
}
synchronized(db.lock()) {
try {
Date date = CurrentTimeUTC.get();
Date dateOfNextRetry;
ghostList.storeWithoutCommit();
if(marker == null) {
dateOfNextRetry = calculateDateOfNextMessageListFetchRetry(reason, date, 0);
marker = new MessageList.MessageListFetchFailedMarker(ghostList, reason, date, dateOfNextRetry);
marker.initializeTransient(mFreetalk);
} else {
marker.setReason(reason);
marker.incrementNumberOfRetries();
dateOfNextRetry = calculateDateOfNextMessageListFetchRetry(reason, date, marker.getNumberOfRetries());
marker.setDate(date);
marker.setDateOfNextRetry(dateOfNextRetry);
}
marker.storeWithoutCommit();
Logger.normal(this, "Marked MessageList as download failed with reason " + reason + " (next retry is at " + dateOfNextRetry
+ ", number of retries: " + marker.getNumberOfRetries() + "): "
+ ghostList);
db.commit();
Logger.debug(this, "COMMITED.");
}
catch(Exception ex) {
- Persistent.checkedRollback(db, this, e);
+ Persistent.checkedRollback(db, this, ex);
}
}
}
public synchronized void onOwnMessageInserted(String id, FreenetURI realURI) throws NoSuchMessageException {
WoTOwnMessage message = (WoTOwnMessage) getOwnMessage(id);
synchronized(message) {
synchronized(db.lock()) {
try {
message.markAsInserted(realURI);
addMessageToMessageList(message);
db.commit(); Logger.debug(this, "COMMITTED.");
}
catch(RuntimeException e) {
Persistent.rollbackAndThrow(db, this, e);
}
}
}
}
/**
* You have to synchronize on this MessageManager and on db.lock() when using this function.
*/
private void addMessageToMessageList(WoTOwnMessage message) {
Query query = db.query();
query.constrain(WoTOwnMessageList.class);
query.descend("mAuthor").constrain(message.getAuthor()).identity();
query.descend("iWasInserted").constrain(false);
query.descend("iAmBeingInserted").constrain(false);
for(WoTOwnMessageList list : generalGetOwnMessageListIterable(query)) {
try {
// FIXME: list.addMessage is synchronized and the caller of this function synchronizes on db.lock() - wrong order! This could cause deadlocks.
list.addMessage(message);
Logger.debug(this, "Added own message " + message + " to list " + list);
return;
}
catch(RuntimeException e) {
/* The list is full. */
Logger.debug(this, "Not adding message " + message.getID() + " to message list " + list.getID(), e);
}
}
WoTOwnIdentity author = (WoTOwnIdentity)message.getAuthor();
WoTOwnMessageList list = new WoTOwnMessageList(author, getFreeOwnMessageListIndex(author));
list.initializeTransient(mFreetalk);
// FIXME: list.addMessage is synchronized and the caller of this function synchronizes on db.lock() - wrong order! This could cause deadlocks.
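// Illustrative sketch of the hazard flagged above (hypothetical interleaving):
// this thread already holds db.lock() and now blocks on the list's monitor in
// the synchronized addMessage(); a second thread that enters list.addMessage()
// first and then touches the database acquires the same two locks in the
// opposite order (list monitor, then db.lock()), so each thread can end up
// waiting forever on the lock the other one holds.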
list.addMessage(message);
list.storeWithoutCommit();
Logger.debug(this, "Found no list with free space, created the new list " + list.getID() + " for own message " + message.getID());
}
@SuppressWarnings("unchecked")
public synchronized Iterable<WoTOwnMessage> getNotInsertedOwnMessages() {
return new Iterable<WoTOwnMessage>() {
public Iterator<WoTOwnMessage> iterator() {
return new Iterator<WoTOwnMessage>() {
private Iterator<WoTOwnMessage> iter;
{
Query query = db.query();
query.constrain(WoTOwnMessage.class);
query.descend("mRealURI").constrain(null).identity();
iter = query.execute().iterator();
}
public boolean hasNext() {
return iter.hasNext();
}
public WoTOwnMessage next() {
WoTOwnMessage next = iter.next();
next.initializeTransient(mFreetalk);
return next;
}
public void remove() {
throw new UnsupportedOperationException();
}
};
}
};
}
/**
* For a database Query of result type <code>ObjectSet<WoTOwnMessageList></code>, this function provides an <code>Iterable</code>. The
* iterator of the ObjectSet cannot be used instead because it will not call initializeTransient() on the objects. The iterator which is
* returned by this function takes care of that.
* Please synchronize on the <code>WoTMessageManager</code> when using this function, it is not synchronized itself.
*/
protected Iterable<WoTOwnMessageList> generalGetOwnMessageListIterable(final Query query) {
return new Iterable<WoTOwnMessageList>(){
@SuppressWarnings("unchecked")
public Iterator<WoTOwnMessageList> iterator() {
return new Iterator<WoTOwnMessageList>() {
private Iterator<WoTOwnMessageList> iter = query.execute().iterator();
public boolean hasNext() {
return iter.hasNext();
}
public WoTOwnMessageList next() {
WoTOwnMessageList next = iter.next();
next.initializeTransient(mFreetalk);
return next;
}
public void remove() {
throw new UnsupportedOperationException();
}
};
}
};
}
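// Usage sketch (illustrative, not part of the original source): the
// synchronized accessors below, e.g. getNotInsertedOwnMessageLists(), build a
// Query and return this wrapper so that every element handed out by next()
// has already had initializeTransient(mFreetalk) applied:
//
// synchronized(manager) { // manager is a hypothetical WoTMessageManager reference
// for(WoTOwnMessageList list : manager.getNotInsertedOwnMessageLists())
// insertList(list); // insertList() is a hypothetical consumer
// }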
/**
* Returns <code>OwnMessageList</code> objects which are marked as not inserted. It will also return those which are marked as currently
* being inserted, they are not filtered out because in the current implementation the WoTMessageListInserter will cancel all inserts
* before using this function.
*/
public synchronized Iterable<WoTOwnMessageList> getNotInsertedOwnMessageLists() {
Query query = db.query();
query.constrain(WoTOwnMessageList.class);
query.descend("iWasInserted").constrain(false);
return generalGetOwnMessageListIterable(query);
}
public synchronized Iterable<WoTOwnMessageList> getBeingInsertedOwnMessageLists() {
Query query = db.query();
query.constrain(WoTOwnMessageList.class);
query.descend("iWasInserted").constrain(false);
query.descend("iAmBeingInserted").constrain(true);
return generalGetOwnMessageListIterable(query);
}
@SuppressWarnings("unchecked")
public synchronized int getUnavailableNewMessageListIndex(FTIdentity identity) {
Query query = db.query();
query.constrain(WoTMessageList.class);
query.constrain(WoTOwnMessageList.class).not();
query.descend("mAuthor").constrain(identity).identity();
query.descend("mIndex").orderDescending(); // FIXME: This is inefficient!
ObjectSet<WoTMessageList> result = query.execute();
if(result.size() == 0)
return 0;
return result.next().getIndex() + 1;
}
@SuppressWarnings("unchecked")
public synchronized int getUnavailableOldMessageListIndex(FTIdentity identity) {
Query query = db.query();
query.constrain(WoTMessageList.class);
query.constrain(WoTOwnMessageList.class).not();
query.descend("mAuthor").constrain(identity).identity();
query.descend("mIndex").orderDescending(); // FIXME: This is inefficient!
ObjectSet<WoTMessageList> result = query.execute();
if(result.size() == 0)
return 0;
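// Worked example (illustrative): if the stored list indices are 7, 6, 5, 2, 1,
// the scan below starts at freeIndex = 6 and decrements while the descending
// result stays contiguous (6, then 5), stopping at 4, the newest missing index.
// If every index down to 0 is present, freeIndex drops below 0 and we return
// latestAvailableIndex + 1 instead, i.e. one above the newest available list.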
int latestAvailableIndex = result.next().getIndex();
int freeIndex = latestAvailableIndex - 1;
while(result.hasNext() && result.next().getIndex() == freeIndex) {
--freeIndex;
}
/* FIXME: To avoid always checking ALL messagelists for a missing one, store somewhere in the FTIdentity what the latest index is up to
* which all messagelists are available! */
return freeIndex >= 0 ? freeIndex : latestAvailableIndex+1;
}
/**
* Get the next free index for an OwnMessageList. You have to synchronize on this MessageManager while creating an OwnMessageList, this
* function does not provide synchronization.
*/
@SuppressWarnings("unchecked")
public int getFreeOwnMessageListIndex(WoTOwnIdentity messageAuthor) {
Query q = db.query();
/* We query for MessageList and not OwnMessageList because the user might have deleted his own messages or lost his database */
q.constrain(MessageList.class);
q.descend("mAuthor").constrain(messageAuthor).identity();
q.descend("mIndex").orderDescending(); // FIXME: This is inefficient!
ObjectSet<MessageList> result = q.execute();
return result.size() > 0 ? result.next().getIndex()+1 : 0;
}
}
| true | true | public synchronized void onMessageListFetchFailed(FTIdentity author, FreenetURI uri, FetchFailedMarker.Reason reason) {
WoTMessageList ghostList = new WoTMessageList(author, uri);
ghostList.initializeTransient(mFreetalk);
MessageList.MessageListFetchFailedMarker marker;
try {
getMessageList(ghostList.getID());
Logger.error(this, "Download failed of a MessageList which we already have: " + ghostList.getURI());
return;
}
catch(NoSuchMessageListException e1) {
try {
marker = getMessageListFetchFailedMarker(ghostList.getID());
} catch(NoSuchFetchFailedMarkerException e) {
marker = null;
}
}
synchronized(db.lock()) {
try {
Date date = CurrentTimeUTC.get();
Date dateOfNextRetry;
ghostList.storeWithoutCommit();
if(marker == null) {
dateOfNextRetry = calculateDateOfNextMessageListFetchRetry(reason, date, 0);
marker = new MessageList.MessageListFetchFailedMarker(ghostList, reason, date, dateOfNextRetry);
marker.initializeTransient(mFreetalk);
} else {
marker.setReason(reason);
marker.incrementNumberOfRetries();
dateOfNextRetry = calculateDateOfNextMessageListFetchRetry(reason, date, marker.getNumberOfRetries());
marker.setDate(date);
marker.setDateOfNextRetry(dateOfNextRetry);
}
marker.storeWithoutCommit();
Logger.normal(this, "Marked MessageList as download failed with reason " + reason + " (next retry is at " + dateOfNextRetry
+ ", number of retries: " + marker.getNumberOfRetries() + "): "
+ ghostList);
db.commit();
Logger.debug(this, "COMMITED.");
}
catch(Exception ex) {
Persistent.checkedRollback(db, this, e);
}
}
}
| public synchronized void onMessageListFetchFailed(FTIdentity author, FreenetURI uri, FetchFailedMarker.Reason reason) {
WoTMessageList ghostList = new WoTMessageList(author, uri);
ghostList.initializeTransient(mFreetalk);
MessageList.MessageListFetchFailedMarker marker;
try {
getMessageList(ghostList.getID());
Logger.error(this, "Download failed of a MessageList which we already have: " + ghostList.getURI());
return;
}
catch(NoSuchMessageListException e1) {
try {
marker = getMessageListFetchFailedMarker(ghostList.getID());
} catch(NoSuchFetchFailedMarkerException e) {
marker = null;
}
}
synchronized(db.lock()) {
try {
Date date = CurrentTimeUTC.get();
Date dateOfNextRetry;
ghostList.storeWithoutCommit();
if(marker == null) {
dateOfNextRetry = calculateDateOfNextMessageListFetchRetry(reason, date, 0);
marker = new MessageList.MessageListFetchFailedMarker(ghostList, reason, date, dateOfNextRetry);
marker.initializeTransient(mFreetalk);
} else {
marker.setReason(reason);
marker.incrementNumberOfRetries();
dateOfNextRetry = calculateDateOfNextMessageListFetchRetry(reason, date, marker.getNumberOfRetries());
marker.setDate(date);
marker.setDateOfNextRetry(dateOfNextRetry);
}
marker.storeWithoutCommit();
Logger.normal(this, "Marked MessageList as download failed with reason " + reason + " (next retry is at " + dateOfNextRetry
+ ", number of retries: " + marker.getNumberOfRetries() + "): "
+ ghostList);
db.commit();
Logger.debug(this, "COMMITED.");
}
catch(Exception ex) {
Persistent.checkedRollback(db, this, ex);
}
}
}
|
diff --git a/org.eclipse.m2e.tests/src/org/eclipse/m2e/tests/MarkerTest.java b/org.eclipse.m2e.tests/src/org/eclipse/m2e/tests/MarkerTest.java
index 7b9e4e1c..62ca3727 100644
--- a/org.eclipse.m2e.tests/src/org/eclipse/m2e/tests/MarkerTest.java
+++ b/org.eclipse.m2e.tests/src/org/eclipse/m2e/tests/MarkerTest.java
@@ -1,395 +1,395 @@
/*******************************************************************************
* Copyright (c) 2010 Sonatype, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* Sonatype, Inc. - initial API and implementation
*******************************************************************************/
package org.eclipse.m2e.tests;
import java.util.List;
import org.eclipse.core.resources.IMarker;
import org.eclipse.core.resources.IProject;
import org.eclipse.core.resources.IncrementalProjectBuilder;
import org.eclipse.m2e.core.MavenPlugin;
import org.eclipse.m2e.core.core.IMavenConstants;
import org.eclipse.m2e.core.project.IMavenProjectFacade;
import org.eclipse.m2e.core.project.ResolverConfiguration;
import org.eclipse.m2e.tests.common.AbstractMavenProjectTestCase;
import org.eclipse.m2e.tests.common.WorkspaceHelpers;
public class MarkerTest extends AbstractMavenProjectTestCase {
@SuppressWarnings("restriction")
public void test() throws Exception {
// Import a project with bad pom.xml
IProject project = createExisting("markerTest", "projects/markers/testWorkflow");
waitForJobsToComplete();
assertNotNull("Expected not null project", project);
IMavenProjectFacade facade = MavenPlugin.getDefault().getMavenProjectManagerImpl().create(project, monitor);
assertNull("Expected null MavenProjectFacade", facade);
String expectedErrorMessage = "Project build error: Non-readable POM ";
WorkspaceHelpers.assertErrorMarker(IMavenConstants.MARKER_POM_LOADING_ID, expectedErrorMessage, 1 /*lineNumber*/,
project);
// Fix the pom, introduce a configuration problem
copyContent(project, "pom_badConfiguration.xml", "pom.xml");
waitForJobsToComplete();
facade = MavenPlugin.getDefault().getMavenProjectManagerImpl().getProject(project);
assertNotNull("Expected not null MavenProjectFacade", facade);
project = facade.getProject();
- expectedErrorMessage = "Unknown or missing lifecycle mapping (project packaging type=\"war\")";
+ expectedErrorMessage = "Unknown or missing lifecycle mapping (project packaging type=\"test-packaging-empty\")";
WorkspaceHelpers.assertErrorMarker(IMavenConstants.MARKER_CONFIGURATION_ID, expectedErrorMessage,
7 /*lineNumber of <packaging> element*/, project);
- WorkspaceHelpers.assertLifecyclePackagingErrorMarkerAttributes(project, "war");
+ WorkspaceHelpers.assertLifecyclePackagingErrorMarkerAttributes(project, "test-packaging-empty");
// Building the project should not remove the marker
project.build(IncrementalProjectBuilder.FULL_BUILD, monitor);
waitForJobsToComplete();
WorkspaceHelpers.assertErrorMarker(IMavenConstants.MARKER_CONFIGURATION_ID, expectedErrorMessage,
7 /*lineNumber of <packaging> element*/, project);
// Fix the current configuration problem, introduce a new one
copyContent(project, "pom_badConfiguration1.xml", "pom.xml");
waitForJobsToComplete();
expectedErrorMessage = "Mojo execution not covered by lifecycle configuration: org.codehaus.modello:modello-maven-plugin:1.1:java {execution: standard} (maven lifecycle phase: generate-sources)";
WorkspaceHelpers.assertErrorMarker(IMavenConstants.MARKER_CONFIGURATION_ID, expectedErrorMessage,
24 /*lineNumber of <goal>standard</goal>*/, project);
// Fix the current configuration problem, introduce a dependency problem
copyContent(project, "pom_badDependency.xml", "pom.xml");
waitForJobsToComplete();
MavenPlugin.getDefault().getProjectConfigurationManager()
.updateProjectConfiguration(project, new ResolverConfiguration(), monitor);
expectedErrorMessage = "Missing artifact missing:missing:jar:0.0.0:compile";
List<IMarker> markers = WorkspaceHelpers.findErrorMarkers(project);
// (jdt) The container 'Maven Dependencies' references non existing library ...missing/missing/0.0.0/missing-0.0.0.jar'
// (maven) Missing artifact missing:missing:jar:0.0.0:compile
assertEquals(WorkspaceHelpers.toString(markers), 2, markers.size());
WorkspaceHelpers.assertErrorMarker(IMavenConstants.MARKER_DEPENDENCY_ID, expectedErrorMessage, 1 /*lineNumber*/,
project);
// Building the project should not remove the marker
project.build(IncrementalProjectBuilder.FULL_BUILD, monitor);
waitForJobsToComplete();
markers = WorkspaceHelpers.findErrorMarkers(project);
// (jdt) The container 'Maven Dependencies' references non existing library ...missing/missing/0.0.0/missing-0.0.0.jar'
// (jdt) The project cannot be built until build path errors are resolved
// (maven) Missing artifact missing:missing:jar:0.0.0:compile
assertEquals(WorkspaceHelpers.toString(markers), 3, markers.size());
WorkspaceHelpers.assertErrorMarker(IMavenConstants.MARKER_DEPENDENCY_ID, expectedErrorMessage, 1 /*lineNumber*/,
project);
// Fix the current dependency problem
copyContent(project, "pom_good.xml", "pom.xml");
waitForJobsToComplete();
WorkspaceHelpers.assertErrorMarker("org.eclipse.jdt.core.problem",
"The project cannot be built until build path errors are resolved", null /*lineNumber*/,
null /*resourceRelativePath*/, project);
// Building the project should fix the problem
project.build(IncrementalProjectBuilder.FULL_BUILD, monitor);
waitForJobsToComplete();
WorkspaceHelpers.assertNoErrors(project);
// Add a fake maven build marker
project.createMarker(IMavenConstants.MARKER_BUILD_ID);
// Building the project should remove the marker
project.build(IncrementalProjectBuilder.FULL_BUILD, monitor);
waitForJobsToComplete();
WorkspaceHelpers.assertNoErrors(project);
// Add a maven build marker based on build participant exception
copyContent(project, "pom_buildException.xml", "pom.xml");
waitForJobsToComplete();
WorkspaceHelpers.assertNoErrors(project);
MavenPlugin.getDefault().getProjectConfigurationManager()
.updateProjectConfiguration(project, new ResolverConfiguration(), monitor);
project.build(IncrementalProjectBuilder.FULL_BUILD, monitor);
waitForJobsToComplete();
expectedErrorMessage = "Exception: " + ThrowBuildExceptionProjectConfigurator.ERROR_MESSAGE;
IMarker marker = WorkspaceHelpers.assertErrorMarker(IMavenConstants.MARKER_BUILD_ID, expectedErrorMessage,
null /*lineNumber*/, project);
// Verify that the marker is removed by a new build
project.build(IncrementalProjectBuilder.FULL_BUILD, monitor);
waitForJobsToComplete();
IMarker newMarker = WorkspaceHelpers.assertErrorMarker(IMavenConstants.MARKER_BUILD_ID, expectedErrorMessage,
null /*lineNumber*/, project);
assertFalse(marker.getId() == newMarker.getId());
}
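// Illustrative note on the assertions above: IMarker.getId() stays constant for
// the lifetime of a marker, so equal ids mean the build preserved the existing
// marker, while different ids mean it was deleted and recreated. The tests below
// use the same technique to tell marker refresh apart from marker recreation.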
public void testBuildContextWithOneProjectConfigurator() throws Exception {
IProject project = createExisting("markerTest", "projects/markers/testBuildContextWithOneProjectConfigurator");
waitForJobsToComplete();
assertNotNull("Expected not null project", project);
IMavenProjectFacade facade = MavenPlugin.getDefault().getMavenProjectManagerImpl().create(project, monitor);
assertNotNull("Expected not null MavenProjectFacade", facade);
WorkspaceHelpers.assertNoErrors(project);
project.build(IncrementalProjectBuilder.FULL_BUILD, monitor);
waitForJobsToComplete();
IMarker errorMarker = WorkspaceHelpers.assertErrorMarker(IMavenConstants.MARKER_BUILD_PARTICIPANT_ID,
AddMarkersProjectConfiguratorFoo.ERROR_MESSAGE, AddMarkersProjectConfiguratorFoo.ERROR_LINE_NUMBER,
AddMarkersProjectConfiguratorFoo.FILE_NAME, project);
IMarker warningMarker = WorkspaceHelpers.assertWarningMarker(IMavenConstants.MARKER_BUILD_PARTICIPANT_ID,
AddMarkersProjectConfiguratorFoo.WARNING_MESSAGE, AddMarkersProjectConfiguratorFoo.WARNING_LINE_NUMBER,
AddMarkersProjectConfiguratorFoo.FILE_NAME, project);
// An incremental build without interesting changes should not remove the markers
copyContent(project, AddMarkersProjectConfiguratorFoo.FILE_NAME, "x.txt");
project.build(IncrementalProjectBuilder.INCREMENTAL_BUILD, monitor);
waitForJobsToComplete();
IMarker newErrorMarker = WorkspaceHelpers.assertErrorMarker(IMavenConstants.MARKER_BUILD_PARTICIPANT_ID,
AddMarkersProjectConfiguratorFoo.ERROR_MESSAGE, AddMarkersProjectConfiguratorFoo.ERROR_LINE_NUMBER,
AddMarkersProjectConfiguratorFoo.FILE_NAME, project);
IMarker newWarningMarker = WorkspaceHelpers.assertWarningMarker(IMavenConstants.MARKER_BUILD_PARTICIPANT_ID,
AddMarkersProjectConfiguratorFoo.WARNING_MESSAGE, AddMarkersProjectConfiguratorFoo.WARNING_LINE_NUMBER,
AddMarkersProjectConfiguratorFoo.FILE_NAME, project);
assertEquals(errorMarker.getId(), newErrorMarker.getId());
assertEquals(errorMarker.getAttribute(IMarker.MESSAGE, null), newErrorMarker.getAttribute(IMarker.MESSAGE, null));
assertEquals(warningMarker.getId(), newWarningMarker.getId());
assertEquals(warningMarker.getAttribute(IMarker.MESSAGE, null),
newWarningMarker.getAttribute(IMarker.MESSAGE, null));
// A full build should remove the markers
project.build(IncrementalProjectBuilder.FULL_BUILD, monitor);
waitForJobsToComplete();
newErrorMarker = WorkspaceHelpers.assertErrorMarker(IMavenConstants.MARKER_BUILD_PARTICIPANT_ID,
AddMarkersProjectConfiguratorFoo.ERROR_MESSAGE, AddMarkersProjectConfiguratorFoo.ERROR_LINE_NUMBER,
AddMarkersProjectConfiguratorFoo.FILE_NAME, project);
newWarningMarker = WorkspaceHelpers.assertWarningMarker(IMavenConstants.MARKER_BUILD_PARTICIPANT_ID,
AddMarkersProjectConfiguratorFoo.WARNING_MESSAGE, AddMarkersProjectConfiguratorFoo.WARNING_LINE_NUMBER,
AddMarkersProjectConfiguratorFoo.FILE_NAME, project);
assertFalse(errorMarker.getId() == newErrorMarker.getId());
assertFalse(warningMarker.getId() == newWarningMarker.getId());
// An incremental build with interesting changes should remove the old markers
errorMarker = newErrorMarker;
warningMarker = newWarningMarker;
copyContent(project, "x.txt", AddMarkersProjectConfiguratorFoo.FILE_NAME);
project.build(IncrementalProjectBuilder.INCREMENTAL_BUILD, monitor);
waitForJobsToComplete();
newErrorMarker = WorkspaceHelpers.assertErrorMarker(IMavenConstants.MARKER_BUILD_PARTICIPANT_ID,
AddMarkersProjectConfiguratorFoo.ERROR_MESSAGE, AddMarkersProjectConfiguratorFoo.ERROR_LINE_NUMBER,
AddMarkersProjectConfiguratorFoo.FILE_NAME, project);
newWarningMarker = WorkspaceHelpers.assertWarningMarker(IMavenConstants.MARKER_BUILD_PARTICIPANT_ID,
AddMarkersProjectConfiguratorFoo.WARNING_MESSAGE, AddMarkersProjectConfiguratorFoo.WARNING_LINE_NUMBER,
AddMarkersProjectConfiguratorFoo.FILE_NAME, project);
assertFalse(errorMarker.getId() == newErrorMarker.getId());
assertFalse(warningMarker.getId() == newWarningMarker.getId());
// A clean build should remove the markers
project.build(IncrementalProjectBuilder.CLEAN_BUILD, monitor);
waitForJobsToComplete();
WorkspaceHelpers.assertNoErrors(project);
}
public void testBuildContextWithTwoProjectConfigurators() throws Exception {
IProject project = createExisting("markerTest", "projects/markers/testBuildContextWithTwoProjectConfigurators");
waitForJobsToComplete();
assertNotNull("Expected not null project", project);
IMavenProjectFacade facade = MavenPlugin.getDefault().getMavenProjectManagerImpl().create(project, monitor);
assertNotNull("Expected not null MavenProjectFacade", facade);
WorkspaceHelpers.assertNoErrors(project);
project.build(IncrementalProjectBuilder.FULL_BUILD, monitor);
waitForJobsToComplete();
IMarker errorMarker = WorkspaceHelpers.assertErrorMarker(IMavenConstants.MARKER_BUILD_PARTICIPANT_ID,
AddMarkersProjectConfiguratorFoo.ERROR_MESSAGE, AddMarkersProjectConfiguratorFoo.ERROR_LINE_NUMBER,
AddMarkersProjectConfiguratorFoo.FILE_NAME, project);
IMarker warningMarker = WorkspaceHelpers.assertWarningMarker(IMavenConstants.MARKER_BUILD_PARTICIPANT_ID,
AddMarkersProjectConfiguratorFoo.WARNING_MESSAGE, AddMarkersProjectConfiguratorFoo.WARNING_LINE_NUMBER,
AddMarkersProjectConfiguratorFoo.FILE_NAME, project);
// An incremental build with interesting changes for only one of the project configurators should not remove the markers created by the other project configurator
copyContent(project, AddMarkersProjectConfiguratorFoo.FILE_NAME, AddMarkersProjectConfiguratorBar.FILE_NAME);
project.build(IncrementalProjectBuilder.INCREMENTAL_BUILD, monitor);
waitForJobsToComplete();
List<IMarker> errorMarkers = WorkspaceHelpers.findErrorMarkers(project);
assertEquals(toString(errorMarkers), 2, errorMarkers.size());
List<IMarker> warningMarkers = WorkspaceHelpers.findWarningMarkers(project);
assertEquals(toString(warningMarkers), 2, warningMarkers.size());
// Verify that the old markers for AddMarkersProjectConfiguratorFoo were not removed
IMarker newErrorMarker = WorkspaceHelpers.assertErrorMarker(IMavenConstants.MARKER_BUILD_PARTICIPANT_ID,
AddMarkersProjectConfiguratorFoo.ERROR_MESSAGE, AddMarkersProjectConfiguratorFoo.ERROR_LINE_NUMBER,
AddMarkersProjectConfiguratorFoo.FILE_NAME, project);
IMarker newWarningMarker = WorkspaceHelpers.assertWarningMarker(IMavenConstants.MARKER_BUILD_PARTICIPANT_ID,
AddMarkersProjectConfiguratorFoo.WARNING_MESSAGE, AddMarkersProjectConfiguratorFoo.WARNING_LINE_NUMBER,
AddMarkersProjectConfiguratorFoo.FILE_NAME, project);
assertEquals(errorMarker.getId(), newErrorMarker.getId());
assertEquals(errorMarker.getAttribute(IMarker.MESSAGE, null), newErrorMarker.getAttribute(IMarker.MESSAGE, null));
assertEquals(warningMarker.getId(), newWarningMarker.getId());
assertEquals(warningMarker.getAttribute(IMarker.MESSAGE, null),
newWarningMarker.getAttribute(IMarker.MESSAGE, null));
// Verify that the new markers for AddMarkersProjectConfiguratorBar were created
WorkspaceHelpers.assertErrorMarker(IMavenConstants.MARKER_BUILD_PARTICIPANT_ID,
AddMarkersProjectConfiguratorBar.ERROR_MESSAGE, AddMarkersProjectConfiguratorBar.ERROR_LINE_NUMBER,
AddMarkersProjectConfiguratorBar.FILE_NAME, project);
WorkspaceHelpers.assertWarningMarker(IMavenConstants.MARKER_BUILD_PARTICIPANT_ID,
AddMarkersProjectConfiguratorBar.WARNING_MESSAGE, AddMarkersProjectConfiguratorBar.WARNING_LINE_NUMBER,
AddMarkersProjectConfiguratorBar.FILE_NAME, project);
}
public void testBuildContextWithSameProjectConfiguratorTwice() throws Exception {
IProject project = createExisting("markerTest", "projects/markers/testBuildContextWithSameProjectConfiguratorTwice");
waitForJobsToComplete();
assertNotNull("Expected not null project", project);
IMavenProjectFacade facade = MavenPlugin.getDefault().getMavenProjectManagerImpl().create(project, monitor);
assertNotNull("Expected not null MavenProjectFacade", facade);
WorkspaceHelpers.assertNoErrors(project);
project.build(IncrementalProjectBuilder.FULL_BUILD, monitor);
waitForJobsToComplete();
List<IMarker> errorMarkers = WorkspaceHelpers.findErrorMarkers(project);
assertEquals(toString(errorMarkers), 2, errorMarkers.size());
List<IMarker> warningMarkers = WorkspaceHelpers.findWarningMarkers(project);
assertEquals(toString(warningMarkers), 2, warningMarkers.size());
String mojoExecutionKey0 = "org.apache.maven.plugins:maven-deploy-plugin:2.5:deploy:default-deploy";
IMarker errorMarker0 = WorkspaceHelpers.assertErrorMarker(IMavenConstants.MARKER_BUILD_PARTICIPANT_ID,
AddMarkersProjectConfigurator.ERROR_MESSAGE + " " + mojoExecutionKey0,
AddMarkersProjectConfigurator.ERROR_LINE_NUMBER, AddMarkersProjectConfigurator.FILE_NAME, project);
IMarker warningMarker0 = WorkspaceHelpers.assertWarningMarker(IMavenConstants.MARKER_BUILD_PARTICIPANT_ID,
AddMarkersProjectConfigurator.WARNING_MESSAGE + " " + mojoExecutionKey0,
AddMarkersProjectConfigurator.WARNING_LINE_NUMBER, AddMarkersProjectConfigurator.FILE_NAME, project);
String mojoExecutionKey1 = "org.apache.maven.plugins:maven-install-plugin:2.3.1:install:default-install";
IMarker errorMarker1 = WorkspaceHelpers.assertErrorMarker(IMavenConstants.MARKER_BUILD_PARTICIPANT_ID,
AddMarkersProjectConfigurator.ERROR_MESSAGE + " " + mojoExecutionKey1,
AddMarkersProjectConfigurator.ERROR_LINE_NUMBER, AddMarkersProjectConfigurator.FILE_NAME, project);
IMarker warningMarker1 = WorkspaceHelpers.assertWarningMarker(IMavenConstants.MARKER_BUILD_PARTICIPANT_ID,
AddMarkersProjectConfigurator.WARNING_MESSAGE + " " + mojoExecutionKey1,
AddMarkersProjectConfigurator.WARNING_LINE_NUMBER, AddMarkersProjectConfigurator.FILE_NAME, project);
// An incremental build should remove and re-create only the markers for the second mojo execution key
copyContent(project, AddMarkersProjectConfigurator.FILE_NAME, "x.txt");
project.build(IncrementalProjectBuilder.INCREMENTAL_BUILD, monitor);
waitForJobsToComplete();
errorMarkers = WorkspaceHelpers.findErrorMarkers(project);
assertEquals(toString(errorMarkers), 2, errorMarkers.size());
warningMarkers = WorkspaceHelpers.findWarningMarkers(project);
assertEquals(toString(warningMarkers), 2, warningMarkers.size());
IMarker newErrorMarker0 = WorkspaceHelpers.assertErrorMarker(IMavenConstants.MARKER_BUILD_PARTICIPANT_ID,
AddMarkersProjectConfigurator.ERROR_MESSAGE + " " + mojoExecutionKey0,
AddMarkersProjectConfigurator.ERROR_LINE_NUMBER, AddMarkersProjectConfigurator.FILE_NAME, project);
IMarker newWarningMarker0 = WorkspaceHelpers.assertWarningMarker(IMavenConstants.MARKER_BUILD_PARTICIPANT_ID,
AddMarkersProjectConfigurator.WARNING_MESSAGE + " " + mojoExecutionKey0,
AddMarkersProjectConfigurator.WARNING_LINE_NUMBER, AddMarkersProjectConfigurator.FILE_NAME, project);
IMarker newErrorMarker1 = WorkspaceHelpers.assertErrorMarker(IMavenConstants.MARKER_BUILD_PARTICIPANT_ID,
AddMarkersProjectConfigurator.ERROR_MESSAGE + " " + mojoExecutionKey1,
AddMarkersProjectConfigurator.ERROR_LINE_NUMBER, AddMarkersProjectConfigurator.FILE_NAME, project);
IMarker newWarningMarker1 = WorkspaceHelpers.assertWarningMarker(IMavenConstants.MARKER_BUILD_PARTICIPANT_ID,
AddMarkersProjectConfigurator.WARNING_MESSAGE + " " + mojoExecutionKey1,
AddMarkersProjectConfigurator.WARNING_LINE_NUMBER, AddMarkersProjectConfigurator.FILE_NAME, project);
assertEquals(errorMarker0.getId(), newErrorMarker0.getId());
assertEquals(warningMarker0.getId(), newWarningMarker0.getId());
assertFalse(errorMarker1.getId() == newErrorMarker1.getId());
assertFalse(warningMarker1.getId() == newWarningMarker1.getId());
}
// public void testSearchMarkers() throws Exception {
// IMavenProjectFacade facade = importMavenProject("projects/lifecyclemapping/lifecycleMappingMetadata",
// "testLifecycleMappingSpecifiedInMetadata/pom.xml");
// assertNotNull("Expected not null MavenProjectFacade", facade);
// IProject project = facade.getProject();
// WorkspaceHelpers.assertNoErrors(project);
// IResource resource = facade.getPom();
//
// int totalMarkers = 100;
// int percentForFind = 10;
// List<String> idsToFind = new ArrayList<String>();
// long start = System.currentTimeMillis();
// String markerType = IMavenConstants.MARKER_CONFIGURATION_ID;
// for(int i = 0; i < totalMarkers; i++ ) {
// IMarker marker = resource.createMarker(markerType);
// marker.setAttribute(IMarker.MESSAGE, "Some reasonably error message here. Or maybe a little bit longer.");
// marker.setAttribute(IMarker.SEVERITY, IMarker.SEVERITY_ERROR);
// marker.setAttribute(IMarker.TRANSIENT, true);
// String id = "org.apache.maven.plugins:maven-compiler-plugin:2.0.1:testCompile:standard:org.eclipse.m2e.jdt.internal.JavaProjectConfigurator"
// + System.currentTimeMillis() + System.nanoTime();
// marker.setAttribute("buildParticipant", id);
// if(i % percentForFind == 0) {
// idsToFind.add(id);
// }
// }
// System.err.println("Time to create " + totalMarkers + " markers: " + (System.currentTimeMillis() - start) + "ms");
// IMarker[] allMarkers = resource
// .findMarkers(markerType, false /*includeSubtypes*/, 0 /*depth*/);
// assertEquals(totalMarkers, allMarkers.length);
//
// start = System.currentTimeMillis();
// for(String idToFind : idsToFind) {
// int found = 0;
// allMarkers = resource
// .findMarkers(markerType, false /*includeSubtypes*/, 0 /*depth*/);
// for(IMarker marker : allMarkers) {
// String id = marker.getAttribute("buildParticipant", null);
// if(idToFind.equals(id)) {
// found++ ;
// }
// }
// assertEquals(1, found);
// }
// long elapsed = (System.currentTimeMillis() - start);
// System.err.println("Time to find " + idsToFind.size() + " markers: " + elapsed + "ms");
// System.err.println("Average time to find 1 marker: " + (elapsed / idsToFind.size()) + "ms");
// }
//
// public void testSearchMarkers1() throws Exception {
// IMavenProjectFacade facade = importMavenProject("projects/lifecyclemapping/lifecycleMappingMetadata",
// "testLifecycleMappingSpecifiedInMetadata/pom.xml");
// assertNotNull("Expected not null MavenProjectFacade", facade);
// IProject project = facade.getProject();
// WorkspaceHelpers.assertNoErrors(project);
// IResource resource = facade.getPom();
//
// int totalMarkers = 10000;
// int percentForFind = 10;
// List<String> idsToFind = new ArrayList<String>();
// long start = System.currentTimeMillis();
// String markerType = IMavenConstants.MARKER_CONFIGURATION_ID;
// for(int i = 0; i < totalMarkers; i++ ) {
// String id = "org.apache.maven.plugins:maven-compiler-plugin:2.0.1:testCompile:standard:org.eclipse.m2e.jdt.internal.JavaProjectConfigurator"
// + System.currentTimeMillis() + System.nanoTime();
// IMarker marker = resource.createMarker(markerType + "." + id);
// marker.setAttribute(IMarker.MESSAGE, "Some reasonably error message here. Or maybe a little bit longer.");
// marker.setAttribute(IMarker.SEVERITY, IMarker.SEVERITY_ERROR);
// marker.setAttribute(IMarker.TRANSIENT, true);
// if(i % percentForFind == 0) {
// idsToFind.add(id);
// }
// }
// System.err.println("Time to create " + totalMarkers + " markers: " + (System.currentTimeMillis() - start) + "ms");
//// IMarker[] allMarkers = resource.findMarkers(markerType, true /*includeSubtypes*/, 0 /*depth*/);
//// assertEquals(totalMarkers, allMarkers.length);
//
// start = System.currentTimeMillis();
// for(String idToFind : idsToFind) {
// IMarker[] allMarkers = resource
// .findMarkers(markerType + "." + idToFind, false /*includeSubtypes*/, 0 /*depth*/);
// assertEquals(1, allMarkers.length);
// }
// long elapsed = (System.currentTimeMillis() - start);
// System.err.println("Time to find " + idsToFind.size() + " markers: " + elapsed + "ms");
// System.err.println("Average time to find 1 marker: " + (elapsed / idsToFind.size()) + "ms");
// }
//
// protected IMavenProjectFacade importMavenProject(String basedir, String pomName) throws Exception {
// ResolverConfiguration configuration = new ResolverConfiguration();
// IProject[] project = importProjects(basedir, new String[] {pomName}, configuration);
// waitForJobsToComplete();
//
// MavenProjectManager mavenProjectManager = MavenPlugin.getDefault().getMavenProjectManager();
// return mavenProjectManager.create(project[0], monitor);
// }
}
| false | true | public void test() throws Exception {
// Import a project with bad pom.xml
IProject project = createExisting("markerTest", "projects/markers/testWorkflow");
waitForJobsToComplete();
assertNotNull("Expected not null project", project);
IMavenProjectFacade facade = MavenPlugin.getDefault().getMavenProjectManagerImpl().create(project, monitor);
assertNull("Expected null MavenProjectFacade", facade);
String expectedErrorMessage = "Project build error: Non-readable POM ";
WorkspaceHelpers.assertErrorMarker(IMavenConstants.MARKER_POM_LOADING_ID, expectedErrorMessage, 1 /*lineNumber*/,
project);
// Fix the pom, introduce a configuration problem
copyContent(project, "pom_badConfiguration.xml", "pom.xml");
waitForJobsToComplete();
facade = MavenPlugin.getDefault().getMavenProjectManagerImpl().getProject(project);
assertNotNull("Expected not null MavenProjectFacade", facade);
project = facade.getProject();
expectedErrorMessage = "Unknown or missing lifecycle mapping (project packaging type=\"war\")";
WorkspaceHelpers.assertErrorMarker(IMavenConstants.MARKER_CONFIGURATION_ID, expectedErrorMessage,
7 /*lineNumber of <packaging> element*/, project);
WorkspaceHelpers.assertLifecyclePackagingErrorMarkerAttributes(project, "war");
// Building the project should not remove the marker
project.build(IncrementalProjectBuilder.FULL_BUILD, monitor);
waitForJobsToComplete();
WorkspaceHelpers.assertErrorMarker(IMavenConstants.MARKER_CONFIGURATION_ID, expectedErrorMessage,
7 /*lineNumber of <packaging> element*/, project);
// Fix the current configuration problem, introduce a new one
copyContent(project, "pom_badConfiguration1.xml", "pom.xml");
waitForJobsToComplete();
expectedErrorMessage = "Mojo execution not covered by lifecycle configuration: org.codehaus.modello:modello-maven-plugin:1.1:java {execution: standard} (maven lifecycle phase: generate-sources)";
WorkspaceHelpers.assertErrorMarker(IMavenConstants.MARKER_CONFIGURATION_ID, expectedErrorMessage,
24 /*lineNumber of <goal>standard</goal>*/, project);
// Fix the current configuration problem, introduce a dependency problem
copyContent(project, "pom_badDependency.xml", "pom.xml");
waitForJobsToComplete();
MavenPlugin.getDefault().getProjectConfigurationManager()
.updateProjectConfiguration(project, new ResolverConfiguration(), monitor);
expectedErrorMessage = "Missing artifact missing:missing:jar:0.0.0:compile";
List<IMarker> markers = WorkspaceHelpers.findErrorMarkers(project);
// (jdt) The container 'Maven Dependencies' references non existing library ...missing/missing/0.0.0/missing-0.0.0.jar'
// (maven) Missing artifact missing:missing:jar:0.0.0:compile
assertEquals(WorkspaceHelpers.toString(markers), 2, markers.size());
WorkspaceHelpers.assertErrorMarker(IMavenConstants.MARKER_DEPENDENCY_ID, expectedErrorMessage, 1 /*lineNumber*/,
project);
// Building the project should not remove the marker
project.build(IncrementalProjectBuilder.FULL_BUILD, monitor);
waitForJobsToComplete();
markers = WorkspaceHelpers.findErrorMarkers(project);
// (jdt) The container 'Maven Dependencies' references non existing library ...missing/missing/0.0.0/missing-0.0.0.jar'
// (jdt) The project cannot be built until build path errors are resolved
// (maven) Missing artifact missing:missing:jar:0.0.0:compile
assertEquals(WorkspaceHelpers.toString(markers), 3, markers.size());
WorkspaceHelpers.assertErrorMarker(IMavenConstants.MARKER_DEPENDENCY_ID, expectedErrorMessage, 1 /*lineNumber*/,
project);
// Fix the current dependency problem
copyContent(project, "pom_good.xml", "pom.xml");
waitForJobsToComplete();
WorkspaceHelpers.assertErrorMarker("org.eclipse.jdt.core.problem",
"The project cannot be built until build path errors are resolved", null /*lineNumber*/,
null /*resourceRelativePath*/, project);
// Building the project should fix the problem
project.build(IncrementalProjectBuilder.FULL_BUILD, monitor);
waitForJobsToComplete();
WorkspaceHelpers.assertNoErrors(project);
// Add a fake maven build marker
project.createMarker(IMavenConstants.MARKER_BUILD_ID);
// Building the project should remove the marker
project.build(IncrementalProjectBuilder.FULL_BUILD, monitor);
waitForJobsToComplete();
WorkspaceHelpers.assertNoErrors(project);
// Add a maven build marker based on build participant exception
copyContent(project, "pom_buildException.xml", "pom.xml");
waitForJobsToComplete();
WorkspaceHelpers.assertNoErrors(project);
MavenPlugin.getDefault().getProjectConfigurationManager()
.updateProjectConfiguration(project, new ResolverConfiguration(), monitor);
project.build(IncrementalProjectBuilder.FULL_BUILD, monitor);
waitForJobsToComplete();
expectedErrorMessage = "Exception: " + ThrowBuildExceptionProjectConfigurator.ERROR_MESSAGE;
IMarker marker = WorkspaceHelpers.assertErrorMarker(IMavenConstants.MARKER_BUILD_ID, expectedErrorMessage,
null /*lineNumber*/, project);
// Verify that the marker is removed by a new build
project.build(IncrementalProjectBuilder.FULL_BUILD, monitor);
waitForJobsToComplete();
IMarker newMarker = WorkspaceHelpers.assertErrorMarker(IMavenConstants.MARKER_BUILD_ID, expectedErrorMessage,
null /*lineNumber*/, project);
assertFalse(marker.getId() == newMarker.getId());
}
| public void test() throws Exception {
// Import a project with bad pom.xml
IProject project = createExisting("markerTest", "projects/markers/testWorkflow");
waitForJobsToComplete();
assertNotNull("Expected not null project", project);
IMavenProjectFacade facade = MavenPlugin.getDefault().getMavenProjectManagerImpl().create(project, monitor);
assertNull("Expected null MavenProjectFacade", facade);
String expectedErrorMessage = "Project build error: Non-readable POM ";
WorkspaceHelpers.assertErrorMarker(IMavenConstants.MARKER_POM_LOADING_ID, expectedErrorMessage, 1 /*lineNumber*/,
project);
// Fix the pom, introduce a configuration problem
copyContent(project, "pom_badConfiguration.xml", "pom.xml");
waitForJobsToComplete();
facade = MavenPlugin.getDefault().getMavenProjectManagerImpl().getProject(project);
assertNotNull("Expected not null MavenProjectFacade", facade);
project = facade.getProject();
expectedErrorMessage = "Unknown or missing lifecycle mapping (project packaging type=\"test-packaging-empty\")";
WorkspaceHelpers.assertErrorMarker(IMavenConstants.MARKER_CONFIGURATION_ID, expectedErrorMessage,
7 /*lineNumber of <packaging> element*/, project);
WorkspaceHelpers.assertLifecyclePackagingErrorMarkerAttributes(project, "test-packaging-empty");
// Building the project should not remove the marker
project.build(IncrementalProjectBuilder.FULL_BUILD, monitor);
waitForJobsToComplete();
WorkspaceHelpers.assertErrorMarker(IMavenConstants.MARKER_CONFIGURATION_ID, expectedErrorMessage,
7 /*lineNumber of <packaging> element*/, project);
// Fix the current configuration problem, introduce a new one
copyContent(project, "pom_badConfiguration1.xml", "pom.xml");
waitForJobsToComplete();
expectedErrorMessage = "Mojo execution not covered by lifecycle configuration: org.codehaus.modello:modello-maven-plugin:1.1:java {execution: standard} (maven lifecycle phase: generate-sources)";
WorkspaceHelpers.assertErrorMarker(IMavenConstants.MARKER_CONFIGURATION_ID, expectedErrorMessage,
24 /*lineNumber of <goal>standard</goal>*/, project);
// Fix the current configuration problem, introduce a dependency problem
copyContent(project, "pom_badDependency.xml", "pom.xml");
waitForJobsToComplete();
MavenPlugin.getDefault().getProjectConfigurationManager()
.updateProjectConfiguration(project, new ResolverConfiguration(), monitor);
expectedErrorMessage = "Missing artifact missing:missing:jar:0.0.0:compile";
List<IMarker> markers = WorkspaceHelpers.findErrorMarkers(project);
// (jdt) The container 'Maven Dependencies' references non existing library ...missing/missing/0.0.0/missing-0.0.0.jar'
// (maven) Missing artifact missing:missing:jar:0.0.0:compile
assertEquals(WorkspaceHelpers.toString(markers), 2, markers.size());
WorkspaceHelpers.assertErrorMarker(IMavenConstants.MARKER_DEPENDENCY_ID, expectedErrorMessage, 1 /*lineNumber*/,
project);
// Building the project should not remove the marker
project.build(IncrementalProjectBuilder.FULL_BUILD, monitor);
waitForJobsToComplete();
markers = WorkspaceHelpers.findErrorMarkers(project);
// (jdt) The container 'Maven Dependencies' references non existing library ...missing/missing/0.0.0/missing-0.0.0.jar'
// (jdt) The project cannot be built until build path errors are resolved
// (maven) Missing artifact missing:missing:jar:0.0.0:compile
assertEquals(WorkspaceHelpers.toString(markers), 3, markers.size());
WorkspaceHelpers.assertErrorMarker(IMavenConstants.MARKER_DEPENDENCY_ID, expectedErrorMessage, 1 /*lineNumber*/,
project);
// Fix the current dependency problem
copyContent(project, "pom_good.xml", "pom.xml");
waitForJobsToComplete();
WorkspaceHelpers.assertErrorMarker("org.eclipse.jdt.core.problem",
"The project cannot be built until build path errors are resolved", null /*lineNumber*/,
null /*resourceRelativePath*/, project);
// Building the project should fix the problem
project.build(IncrementalProjectBuilder.FULL_BUILD, monitor);
waitForJobsToComplete();
WorkspaceHelpers.assertNoErrors(project);
// Add a fake maven build marker
project.createMarker(IMavenConstants.MARKER_BUILD_ID);
// Building the project should remove the marker
project.build(IncrementalProjectBuilder.FULL_BUILD, monitor);
waitForJobsToComplete();
WorkspaceHelpers.assertNoErrors(project);
// Add a maven build marker based on build participant exception
copyContent(project, "pom_buildException.xml", "pom.xml");
waitForJobsToComplete();
WorkspaceHelpers.assertNoErrors(project);
MavenPlugin.getDefault().getProjectConfigurationManager()
.updateProjectConfiguration(project, new ResolverConfiguration(), monitor);
project.build(IncrementalProjectBuilder.FULL_BUILD, monitor);
waitForJobsToComplete();
expectedErrorMessage = "Exception: " + ThrowBuildExceptionProjectConfigurator.ERROR_MESSAGE;
IMarker marker = WorkspaceHelpers.assertErrorMarker(IMavenConstants.MARKER_BUILD_ID, expectedErrorMessage,
null /*lineNumber*/, project);
// Verify that the marker is removed by a new build
project.build(IncrementalProjectBuilder.FULL_BUILD, monitor);
waitForJobsToComplete();
IMarker newMarker = WorkspaceHelpers.assertErrorMarker(IMavenConstants.MARKER_BUILD_ID, expectedErrorMessage,
null /*lineNumber*/, project);
assertFalse(marker.getId() == newMarker.getId());
}
|
diff --git a/src/com/wolvencraft/prison/mines/cmd/WarningCommand.java b/src/com/wolvencraft/prison/mines/cmd/WarningCommand.java
index 7bd30f8..2b042ee 100644
--- a/src/com/wolvencraft/prison/mines/cmd/WarningCommand.java
+++ b/src/com/wolvencraft/prison/mines/cmd/WarningCommand.java
@@ -1,100 +1,100 @@
package com.wolvencraft.prison.mines.cmd;
import java.util.List;
import org.bukkit.ChatColor;
import com.wolvencraft.prison.mines.PrisonMine;
import com.wolvencraft.prison.mines.mine.Mine;
import com.wolvencraft.prison.mines.util.Message;
import com.wolvencraft.prison.mines.util.Util;
public class WarningCommand implements BaseCommand {
public boolean run(String[] args) {
if(args.length == 1) {
getHelp();
return true;
}
Mine curMine = PrisonMine.getCurMine();
if(curMine == null) {
Message.sendFormattedError(PrisonMine.getLanguage().ERROR_MINENOTSELECTED);
return false;
}
if(args[1].equalsIgnoreCase("toggle")) {
if(args.length != 2) {
Message.sendFormattedError(PrisonMine.getLanguage().ERROR_ARGUMENTS);
return false;
}
if(curMine.getWarned()) {
curMine.setWarned(false);
Message.sendFormattedMine("Reset warnings are " + ChatColor.RED + "off");
}
else {
curMine.setWarned(true);
Message.sendFormattedMine("Reset warnings are " + ChatColor.GREEN + "on");
}
} else if(args[1].equalsIgnoreCase("add") || args[1].equalsIgnoreCase("+")) {
if(args.length != 3) {
Message.sendFormattedError(PrisonMine.getLanguage().ERROR_ARGUMENTS);
return false;
}
int time = Util.parseTime(args[2]);
if(time <= 0) {
Message.sendFormattedError("Invalid time provided");
return false;
}
if(time > curMine.getResetPeriod()) {
Message.sendFormattedError("Time cannot be set to a value greater then the reset time");
return false;
}
List<Integer> warnList = curMine.getWarningTimes();
warnList.add(time);
String parsedTime = Util.parseSeconds(time);
Message.sendFormattedMine("Mine will now send warnings " + ChatColor.GOLD + parsedTime + ChatColor.WHITE + " minute(s) before the reset");
} else if(args[2].equalsIgnoreCase("remove") || args[2].equalsIgnoreCase("-")) {
- if(args.length != 4) {
+ if(args.length != 3) {
Message.sendFormattedError(PrisonMine.getLanguage().ERROR_ARGUMENTS);
return false;
}
int time = Util.parseTime(args[3]);
if(time <= 0) {
Message.sendFormattedError("Invalid time provided");
return false;
}
List<Integer> warnList = curMine.getWarningTimes();
int index = warnList.indexOf(time);
if(index == -1) {
Message.sendFormattedError("'" + curMine.getId() + "' does not send a warning " + ChatColor.GOLD + Util.parseSeconds(time) + ChatColor.WHITE + " minute(s) before the reset");
return false;
}
warnList.remove(index);
Message.sendFormattedMine("Mine will no longer send a warning " + ChatColor.GOLD + Util.parseSeconds(time) + ChatColor.WHITE + " minute(s) before the reset");
}
else {
Message.sendFormattedError(PrisonMine.getLanguage().ERROR_COMMAND);
return false;
}
return curMine.saveFile();
}
public void getHelp() {
Message.formatHeader(20, "Timer");
Message.formatHelp("warning", "toggle", "Toggles reset warnings on and off");
Message.formatHelp("warning", "add <time>", "Adds a warning at time specified");
Message.formatHelp("warning", "remove <time>", "Adds a warning at time specified");
return;
}
public void getHelpLine() { Message.formatHelp("warning", "", "Shows reset warning options", "prison.mine.edit"); }
}
| true | true | public boolean run(String[] args) {
if(args.length == 1) {
getHelp();
return true;
}
Mine curMine = PrisonMine.getCurMine();
if(curMine == null) {
Message.sendFormattedError(PrisonMine.getLanguage().ERROR_MINENOTSELECTED);
return false;
}
if(args[1].equalsIgnoreCase("toggle")) {
if(args.length != 2) {
Message.sendFormattedError(PrisonMine.getLanguage().ERROR_ARGUMENTS);
return false;
}
if(curMine.getWarned()) {
curMine.setWarned(false);
Message.sendFormattedMine("Reset warnings are " + ChatColor.RED + "off");
}
else {
curMine.setWarned(true);
Message.sendFormattedMine("Reset warnings are " + ChatColor.GREEN + "on");
}
} else if(args[1].equalsIgnoreCase("add") || args[1].equalsIgnoreCase("+")) {
if(args.length != 3) {
Message.sendFormattedError(PrisonMine.getLanguage().ERROR_ARGUMENTS);
return false;
}
int time = Util.parseTime(args[2]);
if(time <= 0) {
Message.sendFormattedError("Invalid time provided");
return false;
}
if(time > curMine.getResetPeriod()) {
Message.sendFormattedError("Time cannot be set to a value greater then the reset time");
return false;
}
List<Integer> warnList = curMine.getWarningTimes();
warnList.add(time);
String parsedTime = Util.parseSeconds(time);
Message.sendFormattedMine("Mine will now send warnings " + ChatColor.GOLD + parsedTime + ChatColor.WHITE + " minute(s) before the reset");
} else if(args[2].equalsIgnoreCase("remove") || args[2].equalsIgnoreCase("-")) {
if(args.length != 4) {
Message.sendFormattedError(PrisonMine.getLanguage().ERROR_ARGUMENTS);
return false;
}
int time = Util.parseTime(args[3]);
if(time <= 0) {
Message.sendFormattedError("Invalid time provided");
return false;
}
List<Integer> warnList = curMine.getWarningTimes();
int index = warnList.indexOf(time);
if(index == -1) {
Message.sendFormattedError("'" + curMine.getId() + "' does not send a warning " + ChatColor.GOLD + Util.parseSeconds(time) + ChatColor.WHITE + " minute(s) before the reset");
return false;
}
warnList.remove(index);
Message.sendFormattedMine("Mine will no longer send a warning " + ChatColor.GOLD + Util.parseSeconds(time) + ChatColor.WHITE + " minute(s) before the reset");
}
else {
Message.sendFormattedError(PrisonMine.getLanguage().ERROR_COMMAND);
return false;
}
return curMine.saveFile();
}
| public boolean run(String[] args) {
if(args.length == 1) {
getHelp();
return true;
}
Mine curMine = PrisonMine.getCurMine();
if(curMine == null) {
Message.sendFormattedError(PrisonMine.getLanguage().ERROR_MINENOTSELECTED);
return false;
}
if(args[1].equalsIgnoreCase("toggle")) {
if(args.length != 2) {
Message.sendFormattedError(PrisonMine.getLanguage().ERROR_ARGUMENTS);
return false;
}
if(curMine.getWarned()) {
curMine.setWarned(false);
Message.sendFormattedMine("Reset warnings are " + ChatColor.RED + "off");
}
else {
curMine.setWarned(true);
Message.sendFormattedMine("Reset warnings are " + ChatColor.GREEN + "on");
}
} else if(args[1].equalsIgnoreCase("add") || args[1].equalsIgnoreCase("+")) {
if(args.length != 3) {
Message.sendFormattedError(PrisonMine.getLanguage().ERROR_ARGUMENTS);
return false;
}
int time = Util.parseTime(args[2]);
if(time <= 0) {
Message.sendFormattedError("Invalid time provided");
return false;
}
if(time > curMine.getResetPeriod()) {
Message.sendFormattedError("Time cannot be set to a value greater then the reset time");
return false;
}
List<Integer> warnList = curMine.getWarningTimes();
warnList.add(time);
String parsedTime = Util.parseSeconds(time);
Message.sendFormattedMine("Mine will now send warnings " + ChatColor.GOLD + parsedTime + ChatColor.WHITE + " minute(s) before the reset");
} else if(args[2].equalsIgnoreCase("remove") || args[2].equalsIgnoreCase("-")) {
if(args.length != 3) {
Message.sendFormattedError(PrisonMine.getLanguage().ERROR_ARGUMENTS);
return false;
}
int time = Util.parseTime(args[3]);
if(time <= 0) {
Message.sendFormattedError("Invalid time provided");
return false;
}
List<Integer> warnList = curMine.getWarningTimes();
int index = warnList.indexOf(time);
if(index == -1) {
Message.sendFormattedError("'" + curMine.getId() + "' does not send a warning " + ChatColor.GOLD + Util.parseSeconds(time) + ChatColor.WHITE + " minute(s) before the reset");
return false;
}
warnList.remove(index);
Message.sendFormattedMine("Mine will no longer send a warning " + ChatColor.GOLD + Util.parseSeconds(time) + ChatColor.WHITE + " minute(s) before the reset");
}
else {
Message.sendFormattedError(PrisonMine.getLanguage().ERROR_COMMAND);
return false;
}
return curMine.saveFile();
}
|
diff --git a/org.eclipse.mylyn.tasks.ui/src/org/eclipse/mylyn/internal/tasks/ui/TaskActivityMonitor.java b/org.eclipse.mylyn.tasks.ui/src/org/eclipse/mylyn/internal/tasks/ui/TaskActivityMonitor.java
index 0df135fb6..7bc409f59 100644
--- a/org.eclipse.mylyn.tasks.ui/src/org/eclipse/mylyn/internal/tasks/ui/TaskActivityMonitor.java
+++ b/org.eclipse.mylyn.tasks.ui/src/org/eclipse/mylyn/internal/tasks/ui/TaskActivityMonitor.java
@@ -1,134 +1,137 @@
/*******************************************************************************
* Copyright (c) 2004, 2007 Mylyn project committers and others.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*******************************************************************************/
package org.eclipse.mylyn.internal.tasks.ui;
import java.util.List;
import org.eclipse.core.runtime.IStatus;
import org.eclipse.core.runtime.Status;
import org.eclipse.mylyn.commons.core.StatusHandler;
import org.eclipse.mylyn.context.core.AbstractContextListener;
import org.eclipse.mylyn.context.core.IInteractionElement;
import org.eclipse.mylyn.internal.context.core.InteractionContextManager;
import org.eclipse.mylyn.internal.tasks.core.AbstractTask;
import org.eclipse.mylyn.internal.tasks.core.ITasksCoreConstants;
import org.eclipse.mylyn.internal.tasks.core.TaskActivityManager;
import org.eclipse.mylyn.internal.tasks.core.TaskList;
import org.eclipse.mylyn.monitor.core.InteractionEvent;
import org.eclipse.mylyn.tasks.core.ITask;
/**
* Monitors task activity and maintains task activation history
*
* @author Robert Elves
* @author Steffen Pingel
* @since 3.0
*/
@SuppressWarnings("restriction")
public class TaskActivityMonitor {
private final InteractionContextManager contextManager;
private final TaskActivityManager taskActivityManager;
private final TaskList taskList;
private final AbstractContextListener CONTEXT_LISTENER = new AbstractContextListener() {
@Override
public void interestChanged(List<IInteractionElement> elements) {
List<InteractionEvent> events = contextManager.getActivityMetaContext().getInteractionHistory();
InteractionEvent event = events.get(events.size() - 1);
parseInteractionEvent(event, false);
}
};
private ActivityExternalizationParticipant externalizationParticipant;
public TaskActivityMonitor(TaskActivityManager taskActivityManager, InteractionContextManager contextManager) {
this.taskActivityManager = taskActivityManager;
this.contextManager = contextManager;
this.taskList = TasksUiPlugin.getTaskList();
}
public void start() {
contextManager.addActivityMetaContextListener(CONTEXT_LISTENER);
}
/** public for testing */
public boolean parseInteractionEvent(InteractionEvent event, boolean isReloading) {
try {
if (event.getKind().equals(InteractionEvent.Kind.COMMAND)) {
if ((event.getDelta().equals(InteractionContextManager.ACTIVITY_DELTA_ACTIVATED))) {
AbstractTask activatedTask = taskList.getTask(event.getStructureHandle());
if (activatedTask != null) {
taskActivityManager.getTaskActivationHistory().addTask(activatedTask);
return true;
}
}
} else if (event.getKind().equals(InteractionEvent.Kind.ATTENTION)) {
if ((event.getDelta().equals("added") || event.getDelta().equals("add"))) {
- if (event.getStructureKind().equals(InteractionContextManager.ACTIVITY_STRUCTUREKIND_WORKINGSET)) {
- taskActivityManager.addWorkingSetElapsedTime(event.getStructureHandle(), event.getDate(),
- event.getEndDate());
- if (!isReloading) {
- externalizationParticipant.setDirty(true);
- // save not requested for working set time updates so...
- externalizationParticipant.elapsedTimeUpdated(null, 0);
- }
- } else {
- AbstractTask activatedTask = taskList.getTask(event.getStructureHandle());
- if (activatedTask != null) {
- taskActivityManager.addElapsedTime(activatedTask, event.getDate(), event.getEndDate());
+ if (event.getDate().getTime() > 0 && event.getEndDate().getTime() > 0) {
+ if (event.getStructureKind()
+ .equals(InteractionContextManager.ACTIVITY_STRUCTUREKIND_WORKINGSET)) {
+ taskActivityManager.addWorkingSetElapsedTime(event.getStructureHandle(), event.getDate(),
+ event.getEndDate());
+ if (!isReloading) {
+ externalizationParticipant.setDirty(true);
+ // save not requested for working set time updates so...
+ externalizationParticipant.elapsedTimeUpdated(null, 0);
+ }
+ } else {
+ AbstractTask activatedTask = taskList.getTask(event.getStructureHandle());
+ if (activatedTask != null) {
+ taskActivityManager.addElapsedTime(activatedTask, event.getDate(), event.getEndDate());
+ }
}
}
} else if (event.getDelta().equals("removed")) {
ITask task = taskList.getTask(event.getStructureHandle());
if (task != null) {
taskActivityManager.removeElapsedTime(task, event.getDate(), event.getEndDate());
}
}
}
} catch (Throwable t) {
StatusHandler.log(new Status(IStatus.ERROR, ITasksCoreConstants.ID_PLUGIN,
"Error parsing interaction event", t));
}
return false;
}
public void stop() {
contextManager.removeActivityMetaContextListener(CONTEXT_LISTENER);
}
public void reloadActivityTime() {
taskActivityManager.clearActivity();
List<InteractionEvent> events = contextManager.getActivityMetaContext().getInteractionHistory();
for (InteractionEvent event : events) {
parseInteractionEvent(event, true);
}
}
/**
* Returns the task corresponding to the interaction event history item at the specified position
*/
protected ITask getHistoryTaskAt(int pos) {
InteractionEvent event = contextManager.getActivityMetaContext().getInteractionHistory().get(pos);
if (event.getDelta().equals(InteractionContextManager.ACTIVITY_DELTA_ACTIVATED)) {
return TasksUiPlugin.getTaskList().getTask(event.getStructureHandle());
} else {
return null;
}
}
public void setExternalizationParticipant(ActivityExternalizationParticipant participant) {
this.externalizationParticipant = participant;
}
}
| true | true | public boolean parseInteractionEvent(InteractionEvent event, boolean isReloading) {
try {
if (event.getKind().equals(InteractionEvent.Kind.COMMAND)) {
if ((event.getDelta().equals(InteractionContextManager.ACTIVITY_DELTA_ACTIVATED))) {
AbstractTask activatedTask = taskList.getTask(event.getStructureHandle());
if (activatedTask != null) {
taskActivityManager.getTaskActivationHistory().addTask(activatedTask);
return true;
}
}
} else if (event.getKind().equals(InteractionEvent.Kind.ATTENTION)) {
if ((event.getDelta().equals("added") || event.getDelta().equals("add"))) {
if (event.getStructureKind().equals(InteractionContextManager.ACTIVITY_STRUCTUREKIND_WORKINGSET)) {
taskActivityManager.addWorkingSetElapsedTime(event.getStructureHandle(), event.getDate(),
event.getEndDate());
if (!isReloading) {
externalizationParticipant.setDirty(true);
// save not requested for working set time updates so...
externalizationParticipant.elapsedTimeUpdated(null, 0);
}
} else {
AbstractTask activatedTask = taskList.getTask(event.getStructureHandle());
if (activatedTask != null) {
taskActivityManager.addElapsedTime(activatedTask, event.getDate(), event.getEndDate());
}
}
} else if (event.getDelta().equals("removed")) {
ITask task = taskList.getTask(event.getStructureHandle());
if (task != null) {
taskActivityManager.removeElapsedTime(task, event.getDate(), event.getEndDate());
}
}
}
} catch (Throwable t) {
StatusHandler.log(new Status(IStatus.ERROR, ITasksCoreConstants.ID_PLUGIN,
"Error parsing interaction event", t));
}
return false;
}
| public boolean parseInteractionEvent(InteractionEvent event, boolean isReloading) {
try {
if (event.getKind().equals(InteractionEvent.Kind.COMMAND)) {
if ((event.getDelta().equals(InteractionContextManager.ACTIVITY_DELTA_ACTIVATED))) {
AbstractTask activatedTask = taskList.getTask(event.getStructureHandle());
if (activatedTask != null) {
taskActivityManager.getTaskActivationHistory().addTask(activatedTask);
return true;
}
}
} else if (event.getKind().equals(InteractionEvent.Kind.ATTENTION)) {
if ((event.getDelta().equals("added") || event.getDelta().equals("add"))) {
if (event.getDate().getTime() > 0 && event.getEndDate().getTime() > 0) {
if (event.getStructureKind()
.equals(InteractionContextManager.ACTIVITY_STRUCTUREKIND_WORKINGSET)) {
taskActivityManager.addWorkingSetElapsedTime(event.getStructureHandle(), event.getDate(),
event.getEndDate());
if (!isReloading) {
externalizationParticipant.setDirty(true);
// save not requested for working set time updates so...
externalizationParticipant.elapsedTimeUpdated(null, 0);
}
} else {
AbstractTask activatedTask = taskList.getTask(event.getStructureHandle());
if (activatedTask != null) {
taskActivityManager.addElapsedTime(activatedTask, event.getDate(), event.getEndDate());
}
}
}
} else if (event.getDelta().equals("removed")) {
ITask task = taskList.getTask(event.getStructureHandle());
if (task != null) {
taskActivityManager.removeElapsedTime(task, event.getDate(), event.getEndDate());
}
}
}
} catch (Throwable t) {
StatusHandler.log(new Status(IStatus.ERROR, ITasksCoreConstants.ID_PLUGIN,
"Error parsing interaction event", t));
}
return false;
}
|
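The fix in the row above appears to guard elapsed-time accounting so ATTENTION events whose start or end timestamp was never set (left at epoch zero) are skipped instead of being booked as activity. Below is a minimal standalone sketch of that guard; the helper names (ElapsedTimeGuard, isValidInterval, recordElapsed) are hypothetical and not part of the Mylyn API.
import java.util.Date;
class ElapsedTimeGuard {
    // Accept an interval only when both endpoints carry real timestamps;
    // an epoch-zero Date marks an unset value in the activity history.
    static boolean isValidInterval(Date start, Date end) {
        return start.getTime() > 0 && end.getTime() > 0;
    }
    // Hypothetical stand-in for taskActivityManager.addElapsedTime(...):
    // returns the elapsed milliseconds, or 0 when the interval is rejected.
    static long recordElapsed(Date start, Date end) {
        if (!isValidInterval(start, end)) {
            return 0; // skip events that never got a real start/end stamp
        }
        return end.getTime() - start.getTime();
    }
    public static void main(String[] args) {
        System.out.println(recordElapsed(new Date(0), new Date()));        // 0 (skipped)
        System.out.println(recordElapsed(new Date(1000), new Date(5000))); // 4000
    }
}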
diff --git a/src/main/us/exultant/ahs/test/TestCase.java b/src/main/us/exultant/ahs/test/TestCase.java
index e164a98..8d72a41 100644
--- a/src/main/us/exultant/ahs/test/TestCase.java
+++ b/src/main/us/exultant/ahs/test/TestCase.java
@@ -1,450 +1,452 @@
/*
* Copyright 2010, 2011 Eric Myhre <http://exultant.us>
*
* This file is part of AHSlib.
*
* AHSlib is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, version 3 of the License, or
* (at the original copyright holder's option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package us.exultant.ahs.test;
import us.exultant.ahs.log.*;
import us.exultant.ahs.util.*;
import java.util.*;
import java.util.concurrent.*;
public abstract class TestCase implements Runnable {
/**
* @param $log
* Fatal failures that cause the entire case to fail to complete
* are logged at ERROR level; failed assertions within a unit are
* logged at WARN level; units that pass are logged at INFO level,
* and confirmations of individually passed assertions (if enabled)
* are logged at DEBUG level.
* @param $enableConfirmation
*/
public TestCase(Logger $log, boolean $enableConfirmation) {
this.$log = $log;
this.$confirm = $enableConfirmation;
}
/**
* By default, this will always attempt to call {@link System#exit(int)} at the
* end of running tests, exiting with 0 if all tests pass, 4 if any units have
* failed, and 5 if any unit failed catastrophically (i.e. the entire case was not
* completed). This behavior and/or those values can be overridden by overriding
* the {@link #succeeded()}, {@link #failed()}, and {@link #aborted()} methods,
* respectively.
*/
public synchronized void run() {
List<Unit> $units = getUnits(); // list is assumed immutable on pain of death or idiocy
$numUnits = $units.size();
$numUnitsRun = 0;
$numUnitsPassed = 0;
$numUnitsFailed = 0;
for (int $i = 0; $i < $units.size(); $i++) {
Unit $unit = $units.get($i);
if ($unit == null) continue;
try {
resetFailures();
$log.info(this, "TEST UNIT "+$unit.getName()+" STARTING...");
$numUnitsRun++;
$unit.call();
if ($unit.expectExceptionType() != null) {
$numUnitsFailed++;
$log.error(this.getClass(), "EXPECTED EXCEPTION; TEST CASE ABORTED.");
aborted();
}
if ($unitFailures == 0) {
$numUnitsPassed++;
$log.info(this, "TEST UNIT "+$unit.getName()+" PASSED SUCCESSFULLY!\n");
} else {
$numUnitsFailed++;
$log.info(this, "TEST UNIT "+$unit.getName()+" FAILED (WITH "+$unitFailures+" FAILURES)!\n");
}
} catch (AssertionFatal $e) {
$numUnitsFailed++;
$log.error(this.getClass(), "FATAL EXCEPTION; TEST CASE ABORTED.", $e);
aborted();
break;
} catch (AssertionFailed $e) {
$numUnitsFailed++;
$log.error(this.getClass(), "TEST UNIT "+$unit.getName()+" ABORTED.", $e);
} catch (Throwable $e) {
- $numUnitsFailed++;
if ($unit.expectExceptionType() != null) {
// some kind of exception was expected.
if ($unit.expectExceptionType().isAssignableFrom($e.getClass())) {
// and it was this kind that was expected, so this is good.
+ $numUnitsPassed++;
assertInstanceOf($unit.expectExceptionType(), $e); // generates a normal confirmation message
$log.info(this, "TEST UNIT "+$unit.getName()+" PASSED SUCCESSFULLY!\n");
} else {
// and it wasn't this kind. this represents fatal failure.
+ $numUnitsFailed++;
$log.error(this.getClass(), "FATAL EXCEPTION; TEST CASE ABORTED.", $e);
aborted();
break;
}
} else {
// no exception was expected. any exception represents fatal failure.
+ $numUnitsFailed++;
$log.error(this.getClass(), "FATAL EXCEPTION; TEST CASE ABORTED.", $e);
aborted();
break;
}
}
}
if ($numUnitsFailed > 0)
failed();
else
succeeded();
}
/**
* <p>
* Called when the entire test case finished with all units passing successfully.
* Default behavior is printing {@link #preExitMessage()} to stdout followed by
* forceful termination of the program via {@link System#exit(int)} with an exit
* code of 0.
* </p>
*/
protected void succeeded() {
System.out.println(preExitMessage());
System.exit(0);
}
/**
* <p>
* Called when the entire test case finished, but at least one unit did not pass
* successfully. Default behavior is printing {@link #preExitMessage()} to stdout
* followed by forceful termination of the program via {@link System#exit(int)}
* with an exit code of 4.
* </p>
*/
protected void failed() {
System.out.println(preExitMessage());
System.exit(4);
}
/**
* <p>
* Called when the entire test case is aborted (i.e. a unit throws an unexpected
* exception or AssertionFatal). Default behavior is printing
* {@link #preExitMessage()} to stdout followed by forceful termination of the
* program via {@link System#exit(int)} with an exit code of 5.
* </p>
*
* <p>
* Note that the entire test case is <b>not</b> considered aborted when a single
* unit of the case fails or is aborted, and as such this method will not be
* called in that situation ({@link #failed()} will be).
* </p>
*/
protected void aborted() {
System.out.println(preExitMessage());
System.exit(5);
}
protected String preExitMessage() {
return "{\"#\":\"TESTCASE\",\n"+
" \"numUnits\":"+$numUnits+",\n"+
" \"numUnitsRun\":"+$numUnitsRun+",\n"+
" \"numUnitsPassed\":"+$numUnitsPassed+",\n"+
" \"numUnitsFailed\":"+$numUnitsFailed+"\n"+
"}";
}
protected final Logger $log;
private boolean $confirm;
private int $unitFailures;
private int $numUnits;
private int $numUnitsRun;
private int $numUnitsPassed;
private int $numUnitsFailed;
public abstract List<Unit> getUnits();
/**
* <p>
* Each Unit in a TestCase contains a coherent set of assertions (or just one
* assertion) preceded by code to set up the test. The class name of an instance
* of Unit is used when logging the successful passing of a Unit and so use of
* anonymous subclasses of Unit is not advised.
* </p>
*
* <p>
* Any object returned by the {@link #call()} method is ignored by TestCase, so
* it's typically appropriate to act as if Unit actually implemented
* <tt>Callable<{@link Void}></tt>. (The return type of Object is allowed in
* case the client cares to compose their units in odd ways, but doing so is not
* recommended.)
* </p>
*/
public abstract class Unit implements Callable<Object> {
/**
* If this returns null, any exception thrown from the {@link #call()}
* method results in failure of the Unit and aborting of all further Units
* in the entire Case. Otherwise, if this method is overridden to return a
* type, an exception <i>must</i> be thrown from the call method that is
* instanceof that type, or the Unit fails and all further Units in the
* entire Case are aborted.
*/
public <$T extends Throwable> Class<$T> expectExceptionType() { return null; }
// this method often seems to cause warnings about unchecked conversion in subclasses even when the return type is obviously legitimate, but i'm unsure of why.
public void breakIfFailed() throws AssertionFailed {
if ($unitFailures > 0) throw new AssertionFailed("breaking: "+$unitFailures+" failures.");
}
public void breakCaseIfFailed() throws AssertionFatal {
if ($unitFailures > 0) throw new AssertionFatal("breaking case: "+$unitFailures+" failures.");
}
public final String getName() {
String[] $arrg = Primitives.PATTERN_DOT.split(getClass().getCanonicalName());
return $arrg[$arrg.length-1];
}
}
protected void resetFailures() {
$unitFailures = 0;
}
// using autoboxing on primitives as much as these message functions do bothers me but it does save me a helluva lot of lines of code here and i am assuming you're not using any assertions inside of terribly tight loops (or if you are, you're either not using confirmation or not failing hundreds of thousands of times).
// it might be a clarity enhancement to do quotation marks around the actual and expected values depending on type, though, which i don't do right now.
static String messageFail(String $label, Object $expected, Object $actual) {
if ($label == null)
return "assertion failed -- expected " + $expected + " != " + $actual + " actual.";
else
return "assertion \"" + $label + "\" failed -- expected " + $expected + " != " + $actual + " actual.";
}
static String messagePass(String $label, Object $expected, Object $actual) {
if ($label == null)
return "assertion passed -- expected " + $expected + " == " + $actual + " actual.";
else
return "assertion \"" + $label + "\" passed -- expected " + $expected + " == " + $actual + " actual.";
} // i'm not recycling code in the above two because i think someday i might do some alignment stuff, in which case the above become more complicated cases.
static String messageFailNot(String $label, Object $expected, Object $actual) {
if ($label == null)
return "assertion failed -- unexpected " + $expected + " == " + $actual + " actual.";
else
return "assertion \"" + $label + "\" failed -- unexpected " + $expected + " == " + $actual + " actual.";
}
static String messagePassNot(String $label, Object $expected, Object $actual) {
if ($label == null)
return "assertion passed -- expected " + $expected + " != " + $actual + " actual.";
else
return "assertion \"" + $label + "\" passed -- expected " + $expected + " != " + $actual + " actual.";
}
static String messageFail(String $label, String $message) {
if ($label == null)
return "assertion failed -- " + $message;
else
return "assertion \"" + $label + "\" failed -- " +$message;
}
static String messagePass(String $label, String $message) {
if ($label == null)
return "assertion passed -- " + $message;
else
return "assertion \"" + $label + "\" passed -- " + $message;
}
// note that failure messages get wrapped in exceptions and then given to the logger (with a constant message of "assertion failed")
// whereas success messages get passed to the logger as actual messages (with no exception attached).
// this... might be a poor inconsistency, since i could see wanting to be able to report line numbers of successes out loud as well.
////////////////
// BOOLEAN
////////////////
public boolean assertTrue(boolean $bool) {
return assertEquals(null, true, $bool);
}
public boolean assertFalse(boolean $bool) {
return assertEquals(null, false, $bool);
}
public boolean assertEquals(boolean $expected, boolean $actual) {
return assertEquals(null, $expected, $actual);
}
public boolean assertTrue(String $label, boolean $bool) {
return assertEquals($label, true, $bool);
}
public boolean assertFalse(String $label, boolean $bool) {
return assertEquals($label, false, $bool);
}
public boolean assertEquals(String $label, boolean $expected, boolean $actual) {
if ($expected != $actual) {
$unitFailures++;
$log.warn(this.getClass(), new AssertionFailed(messageFail($label, $expected, $actual)));
return false;
}
if ($confirm) $log.debug(this.getClass(), messagePass($label, $expected, $actual));
return true;
}
////////////////
// Object
////////////////
public boolean assertSame(Object $expected, Object $actual) {
return assertSame(null, $expected, $actual);
}
public boolean assertSame(String $label, Object $expected, Object $actual) {
if ($expected != $actual) {
$unitFailures++;
$log.warn(this.getClass(), new AssertionFailed(messageFail($label, $expected, $actual)));
return false;
}
if ($confirm) $log.debug(this.getClass(), messagePass($label, $expected, $actual));
return true;
}
public boolean assertNotSame(Object $expected, Object $actual) {
return assertNotSame(null, $expected, $actual);
}
public boolean assertNotSame(String $label, Object $expected, Object $actual) {
if ($expected == $actual) {
$unitFailures++;
$log.warn(this.getClass(), new AssertionFailed(messageFailNot($label, $expected, $actual)));
return false;
}
if ($confirm) $log.debug(this.getClass(), messagePassNot($label, $expected, $actual));
return true;
}
public boolean assertNull(Object $actual) {
return assertSame(null, null, $actual);
}
public boolean assertNull(String $label, Object $actual) {
return assertSame($label, null, $actual);
}
public boolean assertEquals(Object $expected, Object $actual) {
return assertEquals(null, $expected, $actual);
}
public boolean assertEquals(String $label, Object $expected, Object $actual) {
if (!assertEqualsHelper($expected, $actual)) {
$unitFailures++;
$log.warn(this.getClass(), new AssertionFailed(messageFail($label, $expected, $actual)));
return false;
}
if ($confirm) $log.debug(this.getClass(), messagePass($label, $expected, $actual));
return true;
}
private boolean assertEqualsHelper(Object $expected, Object $actual) {
if ($expected == null) return ($actual == null);
return $expected.equals($actual);
}
public boolean assertInstanceOf(Class<?> $klass, Object $obj) {
return assertInstanceOf(null, $klass, $obj);
}
public boolean assertInstanceOf(String $label, Class<?> $klass, Object $obj) {
if ($obj == null) {
$unitFailures++;
$log.warn(this.getClass(), new AssertionFailed(messageFail($label, "null is never an instance of anything, and certainly not "+$klass+".")));
return false;
}
try {
$klass.cast($obj);
if ($confirm) $log.debug(this.getClass(), messagePass($label, "\""+$obj.getClass().getCanonicalName()+"\" is an instance of \""+$klass.getCanonicalName()+"\""));
return true;
} catch (ClassCastException $e) {
$unitFailures++;
$log.warn(this.getClass(), new AssertionFailed(messageFail($label, $e.getMessage()+".")));
return false;
}
}
////////////////
// String
////////////////
// there's not actually a dang thing special about these, i just want the api itself to reassure developers that yes, strings can be asserted on and nothing weird happens.
public boolean assertEquals(String $expected, String $actual) {
return assertEquals(null, (Object)$expected, (Object)$actual);
}
public boolean assertEquals(String $label, String $expected, String $actual) {
return assertEquals($label, (Object)$expected, (Object)$actual);
}
////////////////
// INT
////////////////
public boolean assertEquals(int $expected, int $actual) {
return assertEquals(null, $expected, $actual);
}
public boolean assertEquals(String $label, int $expected, int $actual) {
if ($expected != $actual) {
$unitFailures++;
$log.warn(this.getClass(), new AssertionFailed(messageFail($label, $expected, $actual)));
return false;
}
if ($confirm) $log.debug(this.getClass(), messagePass($label, $expected, $actual));
return true;
}
////////////////
// BYTE
////////////////
public boolean assertEquals(byte[] $expected, byte[] $actual) {
return assertEquals(null, $expected, $actual);
}
public boolean assertEquals(String $label, byte[] $expected, byte[] $actual) {
return assertEquals($label, Strings.toHex($expected), Strings.toHex($actual));
}
////////////////
// CHAR
////////////////
public boolean assertEquals(char[] $expected, char[] $actual) {
return assertEquals(null, $expected, $actual);
}
public boolean assertEquals(String $label, char[] $expected, char[] $actual) {
return assertEquals($label, Arr.toString($expected), Arr.toString($actual));
}
public static class AssertionFailed extends Error {
public AssertionFailed() { super(); }
public AssertionFailed(String $arg0) { super($arg0); }
public AssertionFailed(Throwable $arg0) { super($arg0); }
public AssertionFailed(String $arg0, Throwable $arg1) { super($arg0, $arg1); }
}
public static class AssertionFatal extends AssertionFailed {
public AssertionFatal() { super(); }
public AssertionFatal(String $arg0) { super($arg0); }
public AssertionFatal(Throwable $arg0) { super($arg0); }
public AssertionFatal(String $arg0, Throwable $arg1) { super($arg0, $arg1); }
}
// Note! You can not make methods like:
// assertNotEquals(byte[] $a, byte[] $b) {
// return !assertEquals($a, $b);
// because they'll still do the failure count and the log messages backwards inside.
//future work:
// i think it should be more or less possible to provide an interface to retrofit ahs TestCase to JUnit, which would be handy for folks that like the ability to integrate JUnit with eclipse plugins or the like.
}
| false | true | public synchronized void run() {
List<Unit> $units = getUnits(); // list is assumed immutable on pain of death or idiocy
$numUnits = $units.size();
$numUnitsRun = 0;
$numUnitsPassed = 0;
$numUnitsFailed = 0;
for (int $i = 0; $i < $units.size(); $i++) {
Unit $unit = $units.get($i);
if ($unit == null) continue;
try {
resetFailures();
$log.info(this, "TEST UNIT "+$unit.getName()+" STARTING...");
$numUnitsRun++;
$unit.call();
if ($unit.expectExceptionType() != null) {
$numUnitsFailed++;
$log.error(this.getClass(), "EXPECTED EXCEPTION; TEST CASE ABORTED.");
aborted();
}
if ($unitFailures == 0) {
$numUnitsPassed++;
$log.info(this, "TEST UNIT "+$unit.getName()+" PASSED SUCCESSFULLY!\n");
} else {
$numUnitsFailed++;
$log.info(this, "TEST UNIT "+$unit.getName()+" FAILED (WITH "+$unitFailures+" FAILURES)!\n");
}
} catch (AssertionFatal $e) {
$numUnitsFailed++;
$log.error(this.getClass(), "FATAL EXCEPTION; TEST CASE ABORTED.", $e);
aborted();
break;
} catch (AssertionFailed $e) {
$numUnitsFailed++;
$log.error(this.getClass(), "TEST UNIT "+$unit.getName()+" ABORTED.", $e);
} catch (Throwable $e) {
$numUnitsFailed++;
if ($unit.expectExceptionType() != null) {
// some kind of exception was expected.
if ($unit.expectExceptionType().isAssignableFrom($e.getClass())) {
// and it was this kind that was expected, so this is good.
assertInstanceOf($unit.expectExceptionType(), $e); // generates a normal confirmation message
$log.info(this, "TEST UNIT "+$unit.getName()+" PASSED SUCCESSFULLY!\n");
} else {
// and it wasn't this kind. this represents fatal failure.
$log.error(this.getClass(), "FATAL EXCEPTION; TEST CASE ABORTED.", $e);
aborted();
break;
}
} else {
// no exception was expected. any exception represents fatal failure.
$log.error(this.getClass(), "FATAL EXCEPTION; TEST CASE ABORTED.", $e);
aborted();
break;
}
}
}
if ($numUnitsFailed > 0)
failed();
else
succeeded();
}
| public synchronized void run() {
List<Unit> $units = getUnits(); // list is assumed immutable on pain of death or idiocy
$numUnits = $units.size();
$numUnitsRun = 0;
$numUnitsPassed = 0;
$numUnitsFailed = 0;
for (int $i = 0; $i < $units.size(); $i++) {
Unit $unit = $units.get($i);
if ($unit == null) continue;
try {
resetFailures();
$log.info(this, "TEST UNIT "+$unit.getName()+" STARTING...");
$numUnitsRun++;
$unit.call();
if ($unit.expectExceptionType() != null) {
$numUnitsFailed++;
$log.error(this.getClass(), "EXPECTED EXCEPTION; TEST CASE ABORTED.");
aborted();
}
if ($unitFailures == 0) {
$numUnitsPassed++;
$log.info(this, "TEST UNIT "+$unit.getName()+" PASSED SUCCESSFULLY!\n");
} else {
$numUnitsFailed++;
$log.info(this, "TEST UNIT "+$unit.getName()+" FAILED (WITH "+$unitFailures+" FAILURES)!\n");
}
} catch (AssertionFatal $e) {
$numUnitsFailed++;
$log.error(this.getClass(), "FATAL EXCEPTION; TEST CASE ABORTED.", $e);
aborted();
break;
} catch (AssertionFailed $e) {
$numUnitsFailed++;
$log.error(this.getClass(), "TEST UNIT "+$unit.getName()+" ABORTED.", $e);
} catch (Throwable $e) {
if ($unit.expectExceptionType() != null) {
// some kind of exception was expected.
if ($unit.expectExceptionType().isAssignableFrom($e.getClass())) {
// and it was this kind that was expected, so this is good.
$numUnitsPassed++;
assertInstanceOf($unit.expectExceptionType(), $e); // generates a normal confirmation message
$log.info(this, "TEST UNIT "+$unit.getName()+" PASSED SUCCESSFULLY!\n");
} else {
// and it wasn't this kind. this represents fatal failure.
$numUnitsFailed++;
$log.error(this.getClass(), "FATAL EXCEPTION; TEST CASE ABORTED.", $e);
aborted();
break;
}
} else {
// no exception was expected. any exception represents fatal failure.
$numUnitsFailed++;
$log.error(this.getClass(), "FATAL EXCEPTION; TEST CASE ABORTED.", $e);
aborted();
break;
}
}
}
if ($numUnitsFailed > 0)
failed();
else
succeeded();
}
|
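The corrected run() above moves the $numUnitsFailed increment out of the catch-all, so a unit that throws its declared expected exception is now tallied as a pass. A hedged usage sketch of that path follows; it compiles only with AHSlib on the classpath, and MyCaseDemo and TestDivByZero are hypothetical names, while TestCase, Unit, Logger, and expectExceptionType() come from the library code shown above.
import us.exultant.ahs.log.*;
import us.exultant.ahs.test.*;
import java.util.*;
class MyCaseDemo extends TestCase {
    public MyCaseDemo(Logger $log) { super($log, true); }
    public List<Unit> getUnits() {
        List<Unit> $units = new ArrayList<Unit>();
        $units.add(new TestDivByZero());
        return $units;
    }
    // a named (non-anonymous) subclass, as the Unit javadoc advises
    private class TestDivByZero extends Unit {
        @SuppressWarnings("unchecked") // the unchecked-conversion warning noted on expectExceptionType above
        public Class<ArithmeticException> expectExceptionType() {
            return ArithmeticException.class; // run() passes this unit only if one is thrown
        }
        public Object call() {
            int $zero = 0;
            return 1 / $zero; // throws ArithmeticException, satisfying the expectation
        }
    }
}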
diff --git a/src/com/programmingteam/qsync/QSync.java b/src/com/programmingteam/qsync/QSync.java
index 30a0157..c6ddce2 100644
--- a/src/com/programmingteam/qsync/QSync.java
+++ b/src/com/programmingteam/qsync/QSync.java
@@ -1,216 +1,216 @@
package com.programmingteam.qsync;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import javax.management.modelmbean.XMLParseException;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import org.xml.sax.SAXException;
import com.programmingteam.Helpers;
///
/// \brief qsync XML file representation
/// Contains the basic config (include ext, compile ext) and the list of defined projects.
///
public class QSync
{
private String mIncludes;
private String mCompiles;
private File mPwd;
private ArrayList<QSyncVcxproj> mProjects;
///
/// \brief reads xml file (qsync format) and creates object hierarchy
/// In case of a read or XML file format error, this method prints a message
/// and makes the application exit with code -1
///
/// \param [in] qsyncfile path to file to read
///
public QSync(File qsyncfile)
{
Document qsyncDoc =null;
try
{
qsyncDoc = DocumentBuilderFactory.newInstance()
.newDocumentBuilder()
.parse(qsyncfile);
}
catch(ParserConfigurationException ex) { System.err.println("QSync: error creating XML configuration"); }
catch(SAXException ex) { System.err.println("Error reading document ("+qsyncfile.getName()+"): "+ex.getMessage()); }
catch(IOException ex) { System.err.println("IOException reading document ("+qsyncfile.getName()+")"); }
if(qsyncDoc==null) System.exit(-1); //If file couldn't be read, kill process
mPwd = qsyncfile.getAbsoluteFile().getParentFile();
try
{
//INCLUDES
NodeList includeExtList = qsyncDoc.getElementsByTagName("ClInclude");
if(includeExtList.getLength() == 0) throw new XMLParseException("element <ClInclude> not found");
if(includeExtList.getLength() >1) throw new XMLParseException("multiple <ClInclude> elements");
Element includes = (Element) qsyncDoc.adoptNode(includeExtList.item(0));
mIncludes = includes.getAttribute("ext");
if(mIncludes.length()==0 || !mIncludes.matches("^[a-zA-Z0-9]*(,[a-zA-Z0-9]*)*$"))
throw new XMLParseException("src attribute of <ClInclude> is not valid");
//COMPILES
NodeList compileExtList = qsyncDoc.getElementsByTagName("ClCompile");
if(compileExtList.getLength() == 0) throw new XMLParseException("element <ClCompile> not found");
if(compileExtList.getLength() >1) throw new XMLParseException("multiple <ClCompile> elements");
Element compiles = (Element) qsyncDoc.adoptNode(compileExtList.item(0));
mCompiles = compiles.getAttribute("ext");
if(mCompiles.length()==0 || !mCompiles.matches("^[a-zA-Z0-9]*(,[a-zA-Z0-9]*)*$"))
throw new XMLParseException("src attribute of <ClCompile> is not valid");
//Projects
mProjects = new ArrayList<QSyncVcxproj>();
NodeList vcxprojList = qsyncDoc.getElementsByTagName("vcxproj");
if(vcxprojList.getLength() == 0) throw new XMLParseException("no <vcxproj> elements found");
//System.out.println("vcxproj length: " + vcxprojList.getLength());
for(int i=vcxprojList.getLength()-1; i>=0; --i)
{
//System.out.println("Project!");
Node projNode = vcxprojList.item(i);
Element projElem = (Element) qsyncDoc.adoptNode(projNode);
String projFile = projElem.getAttribute("proj");
if(projFile.length()==0 || !projFile.matches(".*vcxproj$"))
throw new XMLParseException("proj attribute of <vcxproj> is not valid (must be *.vcxproj)");
projFile = Helpers.resolvePath(mPwd.getAbsolutePath(), projFile);
QSyncVcxproj proj = new QSyncVcxproj(projFile, projFile + ".filters");
NodeList importList = projNode.getChildNodes();
if(importList.getLength() == 0) throw new XMLParseException("no <import> element defined for " + projFile);
for(int j= importList.getLength()-1; j>=0; --j)
{
Node importNode = importList.item(j);
if(!importNode.getNodeName().equals("import")) continue;
String toFilter = importNode.getAttributes().getNamedItem("tofilter").getNodeValue();
toFilter = Helpers.fixSlashes(toFilter);
toFilter = Helpers.stripSlashes(toFilter);
if(toFilter.length()==0 || !toFilter.matches("[a-zA-Z0-9 ]*(\\\\[a-zA-Z0-9 ]*)*"))
throw new XMLParseException("tofilter attribute of <import> is not valid");
QSyncImport imp = new QSyncImport(toFilter);
NodeList importIncludesList = importNode.getChildNodes();
if(importIncludesList.getLength()==0)
throw new XMLParseException("<import> is empty (with tofilter="+toFilter+")");
boolean src=false, inc=false;
for(int n=importIncludesList.getLength()-1; n>=0; --n)
{
Node includeNode = importIncludesList.item(n);
if(!includeNode.getNodeName().equals("include")
&& !includeNode.getNodeName().equals("src")
&& !includeNode.getNodeName().equals("misc"))
continue;
if(includeNode.getNodeName().equals("include"))
{
Node attrNode;
if( (attrNode=includeNode.getAttributes().getNamedItem("accept"))!=null )
imp.setRegexpInclude(attrNode.getNodeValue());
if( (attrNode=includeNode.getAttributes().getNamedItem("exclude"))!=null )
imp.setExcludeInc(attrNode.getNodeValue());
if(inc) throw new XMLParseException("<import tofilter="+toFilter+"> has multiple <include> elements");
if(includeNode.getFirstChild()==null) throw new XMLParseException("<include> element is empty.");
inc = true;
- String includePath = includeNode.getFirstChild().getNodeValue();
+ String includePath = includeNode.getFirstChild().getNodeValue().trim();
includePath = Helpers.resolvePath(mPwd.getAbsolutePath(), includePath);
imp.setInclude(includePath);
}
else if(includeNode.getNodeName().equals("src"))
{
Node attrNode;
if( (attrNode=includeNode.getAttributes().getNamedItem("regexp"))!=null)
imp.setRegexpSrc(attrNode.getNodeValue());
if( (attrNode=includeNode.getAttributes().getNamedItem("exclude"))!=null )
imp.setExcludeSrc(attrNode.getNodeValue());
if(src) throw new XMLParseException("<import tofilter="+toFilter+"> has multiple <src> elements");
if(includeNode.getFirstChild()==null) throw new XMLParseException("<src> element is empty");
src = true;
- String srcPath = includeNode.getFirstChild().getNodeValue();
+ String srcPath = includeNode.getFirstChild().getNodeValue().trim();
srcPath = Helpers.resolvePath(mPwd.getAbsolutePath(), srcPath);
imp.setSrc(srcPath);
}
else if(includeNode.getNodeName().equals("misc"))
{
// TODO set regexp
if(includeNode.getFirstChild()==null) throw new XMLParseException("<misc> element is empty");
String miscPath = includeNode.getFirstChild().getNodeValue();
miscPath = Helpers.resolvePath(mPwd.getAbsolutePath(), miscPath);
imp.addMisc(miscPath);
}
else
{
throw new XMLParseException("<import tofilter="+toFilter+"> has invalid element: " + includeNode.getNodeName());
}
}
proj.addImport(imp);
}
mProjects.add(proj);
}
}
catch(XMLParseException ex) { System.err.println("XMLParseException: " + ex.getMessage()); System.exit(-1); }
}
///
/// \brief returns absolute path to dir containing loaded config file
/// \return File absolute path to dir containing loaded config file
///
public File getPWD()
{
return mPwd;
}
///
/// \brief returns list of projects from config file
/// \return list of projects
///
public ArrayList<QSyncVcxproj> getProjects()
{
return mProjects;
}
public String getIncludeExt()
{
return mIncludes;
}
public String getCompileExt()
{
return mCompiles;
}
///
/// \brief Prints all data loaded from config
///
public void debugPrint()
{
System.out.println("mIncludes: " + mIncludes);
System.out.println("mCompiles: " + mCompiles);
System.out.println("mProjects ("+mProjects.size()+"):");
for(QSyncVcxproj proj: mProjects)
proj.debugPrint();
}
}
| false | true | public QSync(File qsyncfile)
{
Document qsyncDoc =null;
try
{
qsyncDoc = DocumentBuilderFactory.newInstance()
.newDocumentBuilder()
.parse(qsyncfile);
}
catch(ParserConfigurationException ex) { System.err.println("QSync: error creating XML configuration"); }
catch(SAXException ex) { System.err.println("Error reading document ("+qsyncfile.getName()+"): "+ex.getMessage()); }
catch(IOException ex) { System.err.println("IOException reading document ("+qsyncfile.getName()+")"); }
if(qsyncDoc==null) System.exit(-1); //If file couldn't be read, kill process
mPwd = qsyncfile.getAbsoluteFile().getParentFile();
try
{
//INCLUDES
NodeList includeExtList = qsyncDoc.getElementsByTagName("ClInclude");
if(includeExtList.getLength() == 0) throw new XMLParseException("element <ClInclude> not found");
if(includeExtList.getLength() >1) throw new XMLParseException("multiple <ClInclude> elements");
Element includes = (Element) qsyncDoc.adoptNode(includeExtList.item(0));
mIncludes = includes.getAttribute("ext");
if(mIncludes.length()==0 || !mIncludes.matches("^[a-zA-Z0-9]*(,[a-zA-Z0-9]*)*$"))
throw new XMLParseException("src attribute of <ClInclude> is not valid");
//COMPILES
NodeList compileExtList = qsyncDoc.getElementsByTagName("ClCompile");
if(compileExtList.getLength() == 0) throw new XMLParseException("element <ClCompile> not found");
if(compileExtList.getLength() >1) throw new XMLParseException("multiple <ClCompile> elements");
Element compiles = (Element) qsyncDoc.adoptNode(compileExtList.item(0));
mCompiles = compiles.getAttribute("ext");
if(mCompiles.length()==0 || !mCompiles.matches("^[a-zA-Z0-9]*(,[a-zA-Z0-9]*)*$"))
throw new XMLParseException("src attribute of <ClCompile> is not valid");
//Projects
mProjects = new ArrayList<QSyncVcxproj>();
NodeList vcxprojList = qsyncDoc.getElementsByTagName("vcxproj");
if(vcxprojList.getLength() == 0) throw new XMLParseException("no <vcxproj> elements found");
//System.out.println("vcxproj length: " + vcxprojList.getLength());
for(int i=vcxprojList.getLength()-1; i>=0; --i)
{
//System.out.println("Project!");
Node projNode = vcxprojList.item(i);
Element projElem = (Element) qsyncDoc.adoptNode(projNode);
String projFile = projElem.getAttribute("proj");
if(projFile.length()==0 || !projFile.matches(".*vcxproj$"))
throw new XMLParseException("proj attribute of <vcxproj> is not valid (must be *.vcxproj)");
projFile = Helpers.resolvePath(mPwd.getAbsolutePath(), projFile);
QSyncVcxproj proj = new QSyncVcxproj(projFile, projFile + ".filters");
NodeList importList = projNode.getChildNodes();
if(importList.getLength() == 0) throw new XMLParseException("no <import> element defined for " + projFile);
for(int j= importList.getLength()-1; j>=0; --j)
{
Node importNode = importList.item(j);
if(!importNode.getNodeName().equals("import")) continue;
String toFilter = importNode.getAttributes().getNamedItem("tofilter").getNodeValue();
toFilter = Helpers.fixSlashes(toFilter);
toFilter = Helpers.stripSlashes(toFilter);
if(toFilter.length()==0 || !toFilter.matches("[a-zA-Z0-9 ]*(\\\\[a-zA-Z0-9 ]*)*"))
throw new XMLParseException("tofilter attribute of <import> is not valid");
QSyncImport imp = new QSyncImport(toFilter);
NodeList importIncludesList = importNode.getChildNodes();
if(importIncludesList.getLength()==0)
throw new XMLParseException("<import> is empty (with tofilter="+toFilter+")");
boolean src=false, inc=false;
for(int n=importIncludesList.getLength()-1; n>=0; --n)
{
Node includeNode = importIncludesList.item(n);
if(!includeNode.getNodeName().equals("include")
&& !includeNode.getNodeName().equals("src")
&& !includeNode.getNodeName().equals("misc"))
continue;
if(includeNode.getNodeName().equals("include"))
{
Node attrNode;
if( (attrNode=includeNode.getAttributes().getNamedItem("accept"))!=null )
imp.setRegexpInclude(attrNode.getNodeValue());
if( (attrNode=includeNode.getAttributes().getNamedItem("exclude"))!=null )
imp.setExcludeInc(attrNode.getNodeValue());
if(inc) throw new XMLParseException("<import tofilter="+toFilter+"> has multiple <include> elements");
if(includeNode.getFirstChild()==null) throw new XMLParseException("<include> element is empty.");
inc = true;
String includePath = includeNode.getFirstChild().getNodeValue();
includePath = Helpers.resolvePath(mPwd.getAbsolutePath(), includePath);
imp.setInclude(includePath);
}
else if(includeNode.getNodeName().equals("src"))
{
Node attrNode;
if( (attrNode=includeNode.getAttributes().getNamedItem("regexp"))!=null)
imp.setRegexpSrc(attrNode.getNodeValue());
if( (attrNode=includeNode.getAttributes().getNamedItem("exclude"))!=null )
imp.setExcludeSrc(attrNode.getNodeValue());
if(src) throw new XMLParseException("<import tofilter="+toFilter+"> has multiple <src> elements");
if(includeNode.getFirstChild()==null) throw new XMLParseException("<src> element is empty");
src = true;
String srcPath = includeNode.getFirstChild().getNodeValue();
srcPath = Helpers.resolvePath(mPwd.getAbsolutePath(), srcPath);
imp.setSrc(srcPath);
}
else if(includeNode.getNodeName().equals("misc"))
{
// TODO set regexp
if(includeNode.getFirstChild()==null) throw new XMLParseException("<misc> element is empty");
String miscPath = includeNode.getFirstChild().getNodeValue();
miscPath = Helpers.resolvePath(mPwd.getAbsolutePath(), miscPath);
imp.addMisc(miscPath);
}
else
{
throw new XMLParseException("<import tofilter="+toFilter+"> has invalid element: " + includeNode.getNodeName());
}
}
proj.addImport(imp);
}
mProjects.add(proj);
}
}
catch(XMLParseException ex) { System.err.println("XMLParseException: " + ex.getMessage()); System.exit(-1); }
}
| public QSync(File qsyncfile)
{
Document qsyncDoc =null;
try
{
qsyncDoc = DocumentBuilderFactory.newInstance()
.newDocumentBuilder()
.parse(qsyncfile);
}
catch(ParserConfigurationException ex) { System.err.println("QSync: error creating XML configuration"); }
catch(SAXException ex) { System.err.println("Error reading document ("+qsyncfile.getName()+"): "+ex.getMessage()); }
catch(IOException ex) { System.err.println("IOException reading document ("+qsyncfile.getName()+")"); }
if(qsyncDoc==null) System.exit(-1); //If file couldn't be read, kill process
mPwd = qsyncfile.getAbsoluteFile().getParentFile();
try
{
//INCLUDES
NodeList includeExtList = qsyncDoc.getElementsByTagName("ClInclude");
if(includeExtList.getLength() == 0) throw new XMLParseException("element <ClInclude> not found");
if(includeExtList.getLength() >1) throw new XMLParseException("multiple <ClInclude> elements");
Element includes = (Element) qsyncDoc.adoptNode(includeExtList.item(0));
mIncludes = includes.getAttribute("ext");
if(mIncludes.length()==0 || !mIncludes.matches("^[a-zA-Z0-9]*(,[a-zA-Z0-9]*)*$"))
throw new XMLParseException("src attribute of <ClInclude> is not valid");
//COMPILES
NodeList compileExtList = qsyncDoc.getElementsByTagName("ClCompile");
if(compileExtList.getLength() == 0) throw new XMLParseException("element <ClCompile> not found");
if(compileExtList.getLength() >1) throw new XMLParseException("multiple <ClCompile> elements");
Element compiles = (Element) qsyncDoc.adoptNode(compileExtList.item(0));
mCompiles = compiles.getAttribute("ext");
if(mCompiles.length()==0 || !mCompiles.matches("^[a-zA-Z0-9]*(,[a-zA-Z0-9]*)*$"))
throw new XMLParseException("src attribute of <ClCompile> is not valid");
//Projects
mProjects = new ArrayList<QSyncVcxproj>();
NodeList vcxprojList = qsyncDoc.getElementsByTagName("vcxproj");
if(vcxprojList.getLength() == 0) throw new XMLParseException("no <vcxproj> elements found");
//System.out.println("vcxproj length: " + vcxprojList.getLength());
for(int i=vcxprojList.getLength()-1; i>=0; --i)
{
//System.out.println("Project!");
Node projNode = vcxprojList.item(i);
Element projElem = (Element) qsyncDoc.adoptNode(projNode);
String projFile = projElem.getAttribute("proj");
if(projFile.length()==0 || !projFile.matches(".*vcxproj$"))
throw new XMLParseException("proj attribute of <vcxproj> is not valid (must be *.vcxproj)");
projFile = Helpers.resolvePath(mPwd.getAbsolutePath(), projFile);
QSyncVcxproj proj = new QSyncVcxproj(projFile, projFile + ".filters");
NodeList importList = projNode.getChildNodes();
if(importList.getLength() == 0) throw new XMLParseException("no <import> element defined for " + projFile);
for(int j= importList.getLength()-1; j>=0; --j)
{
Node importNode = importList.item(j);
if(!importNode.getNodeName().equals("import")) continue;
String toFilter = importNode.getAttributes().getNamedItem("tofilter").getNodeValue();
toFilter = Helpers.fixSlashes(toFilter);
toFilter = Helpers.stripSlashes(toFilter);
if(toFilter.length()==0 || !toFilter.matches("[a-zA-Z0-9 ]*(\\\\[a-zA-Z0-9 ]*)*"))
throw new XMLParseException("tofilter attribute of <import> is not valid");
QSyncImport imp = new QSyncImport(toFilter);
NodeList importIncludesList = importNode.getChildNodes();
if(importIncludesList.getLength()==0)
throw new XMLParseException("<import> is empty (with tofilter="+toFilter+")");
boolean src=false, inc=false;
for(int n=importIncludesList.getLength()-1; n>=0; --n)
{
Node includeNode = importIncludesList.item(n);
if(!includeNode.getNodeName().equals("include")
&& !includeNode.getNodeName().equals("src")
&& !includeNode.getNodeName().equals("misc"))
continue;
if(includeNode.getNodeName().equals("include"))
{
Node attrNode;
if( (attrNode=includeNode.getAttributes().getNamedItem("accept"))!=null )
imp.setRegexpInclude(attrNode.getNodeValue());
if( (attrNode=includeNode.getAttributes().getNamedItem("exclude"))!=null )
imp.setExcludeInc(attrNode.getNodeValue());
if(inc) throw new XMLParseException("<import tofilter="+toFilter+"> has multiple <include> elements");
if(includeNode.getFirstChild()==null) throw new XMLParseException("<include> element is empty.");
inc = true;
String includePath = includeNode.getFirstChild().getNodeValue().trim();
includePath = Helpers.resolvePath(mPwd.getAbsolutePath(), includePath);
imp.setInclude(includePath);
}
else if(includeNode.getNodeName().equals("src"))
{
Node attrNode;
if( (attrNode=includeNode.getAttributes().getNamedItem("regexp"))!=null)
imp.setRegexpSrc(attrNode.getNodeValue());
if( (attrNode=includeNode.getAttributes().getNamedItem("exclude"))!=null )
imp.setExcludeSrc(attrNode.getNodeValue());
if(src) throw new XMLParseException("<import tofilter="+toFilter+"> has multiple <src> elements");
if(includeNode.getFirstChild()==null) throw new XMLParseException("<src> element is empty");
src = true;
String srcPath = includeNode.getFirstChild().getNodeValue().trim();
srcPath = Helpers.resolvePath(mPwd.getAbsolutePath(), srcPath);
imp.setSrc(srcPath);
}
else if(includeNode.getNodeName().equals("misc"))
{
// TODO set regexp
if(includeNode.getFirstChild()==null) throw new XMLParseException("<misc> element is empty");
String miscPath = includeNode.getFirstChild().getNodeValue();
miscPath = Helpers.resolvePath(mPwd.getAbsolutePath(), miscPath);
imp.addMisc(miscPath);
}
else
{
throw new XMLParseException("<import tofilter="+toFilter+"> has invalid element: " + includeNode.getNodeName());
}
}
proj.addImport(imp);
}
mProjects.add(proj);
}
}
catch(XMLParseException ex) { System.err.println("XMLParseException: " + ex.getMessage()); System.exit(-1); }
}
|
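The two .trim() calls added in the QSync fix above matter because a DOM text node returned by getFirstChild().getNodeValue() includes whatever indentation and newlines the pretty-printed XML put around the path, which would otherwise be baked into the resolved file path. A small self-contained sketch of the effect; TrimDemo is a hypothetical name and not part of QSync.
import java.io.ByteArrayInputStream;
import javax.xml.parsers.DocumentBuilderFactory;
import org.w3c.dom.Document;
class TrimDemo {
    public static void main(String[] args) throws Exception {
        String xml = "<import>\n  <src>\n    ../src/main\n  </src>\n</import>";
        Document doc = DocumentBuilderFactory.newInstance()
                .newDocumentBuilder()
                .parse(new ByteArrayInputStream(xml.getBytes("UTF-8")));
        String raw = doc.getElementsByTagName("src").item(0)
                .getFirstChild().getNodeValue();
        System.out.println("[" + raw + "]");        // surrounding whitespace and newlines included
        System.out.println("[" + raw.trim() + "]"); // [../src/main]
    }
}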
diff --git a/src/org/geworkbench/parsers/GeoSeriesMatrixParser.java b/src/org/geworkbench/parsers/GeoSeriesMatrixParser.java
index b31595c7..14b98ae3 100644
--- a/src/org/geworkbench/parsers/GeoSeriesMatrixParser.java
+++ b/src/org/geworkbench/parsers/GeoSeriesMatrixParser.java
@@ -1,407 +1,403 @@
package org.geworkbench.parsers;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.InterruptedIOException;
import java.util.ArrayList;
import java.util.List;
import java.util.StringTokenizer;
import javax.swing.ProgressMonitorInputStream;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.geworkbench.bison.datastructure.biocollections.microarrays.CSMicroarraySet;
import org.geworkbench.bison.datastructure.biocollections.microarrays.DSMicroarraySet;
import org.geworkbench.bison.datastructure.bioobjects.markers.CSExpressionMarker;
import org.geworkbench.bison.datastructure.bioobjects.markers.DSGeneMarker;
import org.geworkbench.bison.datastructure.bioobjects.markers.annotationparser.AnnotationParser;
import org.geworkbench.bison.datastructure.bioobjects.microarray.CSExpressionMarkerValue;
import org.geworkbench.bison.datastructure.bioobjects.microarray.CSMicroarray;
import org.geworkbench.bison.datastructure.bioobjects.microarray.DSMicroarray;
import org.geworkbench.util.AffyAnnotationUtil;
/**
* @author Nikhil
* @version $Id$
*/
public class GeoSeriesMatrixParser {
static Log log = LogFactory.getLog(SOFTFileFormat.class);
private static final String commentSign1 = "#";
private static final String commentSign2 = "!";
private static final String commentSign3 = "^";
private static final String columnSeperator = "\t";
private static final String duplicateLabelModificator = "_2";
CSMicroarraySet maSet = new CSMicroarraySet();
List<String> markArrays = new ArrayList<String>();
private int possibleMarkers = 0;
transient private String errorMessage;
/*
* (non-Javadoc)
* @see org.geworkbench.components.parsers.FileFormat#checkFormat(java.io.File)
*/
public boolean checkFormat(File file) throws InterruptedIOException {
BufferedReader reader = null;
ProgressMonitorInputStream progressIn = null;
try {
FileInputStream fileIn = new FileInputStream(file);
progressIn = new ProgressMonitorInputStream(
null, "Loading data from " + file.getName(), fileIn);
reader = new BufferedReader(new InputStreamReader(
progressIn));
String line = null;
int totalColumns = 0;
List<String> markers = new ArrayList<String>();
List<String> arrays = new ArrayList<String>();
int lineIndex = 0;
int headerLineIndex = 0;
while ((line = reader.readLine()) != null) { // for each line
/*
* Adding comments that start with '!' and '#' from the GEO SOFT file to the Experiment Information tab
*/
if (line.startsWith(commentSign1) || line.startsWith(commentSign2)) {
//Ignoring the lines that have '!series_matrix_table_begin' and '!series_matrix_table_end'
if(!line.equalsIgnoreCase("!series_matrix_table_begin") && !line.equalsIgnoreCase("!series_matrix_table_end")) {
// to be consistent, this detailed information should be used elsewhere instead of as "description" field
// maSet.setDescription(line.substring(1));
}
}
String[] mark = line.split("\t");
if(mark[0].equals("!Sample_title")){
for (int i=1;i<mark.length;i++){
markArrays.add(mark[i]);
}
}
if ((line.indexOf(commentSign1) < 0)
&& (line.indexOf(commentSign2) != 0)
&& (line.indexOf(commentSign3) != 0)
&& (line.length() > 0)) {// we'll skip comments and
// anything before header
if (headerLineIndex == 0) {
// no header detected yet, then
// this is the header.
headerLineIndex = lineIndex;
}
int columnIndex = 0;
int accessionIndex = 0;
String[] tokens = line.split(columnSeperator);
for(String token: tokens) { // for each column
token = token.trim();
if ((headerLineIndex > 0) && (columnIndex == 0)) {
/*
* if this line is after header, then first column
* should be our marker name
*/
if (markers.contains(token)) {// duplicate markers
log.error("Duplicate Markers: "+token);
errorMessage = "Duplicate Markers: "+token;
return false;
} else {
markers.add(token);
}
} else if (headerLineIndex == lineIndex) { // header
if (token.equals("")) {
accessionIndex = columnIndex;
} else if (arrays.contains(token)) {// duplicate arrays
log.error("Duplicate Arrays labels " + token
+ " in " + file.getName());
errorMessage = "Duplicate Arrays labels "
+ token + " in " + file.getName();
return false;
} else {
arrays.add(token);
}
}
columnIndex++;
lineIndex++;
} // end of the for loop parsing one line
/* check if columns match or not */
if (headerLineIndex > 0) {
/*
* if this line is real data, we assume lines after
* header are real data. (we might have a bug here)
*/
if (totalColumns == 0) { /* not been set yet */
totalColumns = columnIndex - accessionIndex;
} else if (columnIndex != totalColumns){ // if not equal
errorMessage = "Columns do not match: columnIndex="+columnIndex+" totalColumns="+totalColumns+" lineIndex="+lineIndex;
return false;
}
}
} // end of if block for one line
} // end of while loop of read line
possibleMarkers = markers.size();
fileIn.close();
} catch (java.io.InterruptedIOException ie) {
if ( progressIn.getProgressMonitor().isCanceled())
{
throw ie;
}
else {
ie.printStackTrace();
}
} catch (Exception e) {
log.error("GEO SOFT check file format exception: " + e);
e.printStackTrace();
errorMessage = "GEO SOFT check file format exception: " + e;
return false;
} finally {
try {
reader.close();
} catch (IOException e) {
// no-op
e.printStackTrace();
}
}
return true;
}
/*
* (non-Javadoc)
*
* @see org.geworkbench.components.parsers.FileFormat#getMArraySet(java.io.File)
*/
public DSMicroarraySet getMArraySet(File file)
throws InputFileFormatException, InterruptedIOException {
final int extSeperater = '.';
if (!checkFormat(file)) {
log
.info("SOFTFileFormat::getMArraySet - "
+ "Attempting to open a file that does not comply with the "
+ "GEO SOFT file format.");
throw new InputFileFormatException(errorMessage);
}
String fileName = file.getName();
- int dotIndex = fileName.lastIndexOf(extSeperater);
- if (dotIndex != -1) {
- fileName = fileName.substring(0, dotIndex);
- }
maSet.setLabel(fileName);
BufferedReader in = null;
try {
in = new BufferedReader(new FileReader(file));
if (in != null) {
String header = in.readLine();
if (header == null) {
throw new InputFileFormatException("File is empty.");
}
while (header != null
&& (header.startsWith(commentSign1) || header
.startsWith(commentSign2) || header
.startsWith(commentSign3))
|| StringUtils.isEmpty(header)) {
header = in.readLine();
}
if (header == null) {
throw new InputFileFormatException(
"File is empty or consists of only comments.\n"
+ "SOFT File Format expected");
}
header = StringUtils.replace(header, "\"", "");
StringTokenizer headerTokenizer = new StringTokenizer(header,
columnSeperator, false);
int n = headerTokenizer.countTokens();
if (n <= 1) {
throw new InputFileFormatException(
"Attempting to open a file that does not comply with the SOFT File format.\n"
+ "Invalid header: " + header);
}
n -= 1;
String line = in.readLine();
line = StringUtils.replace(line, "\"", "");
int m = 0;
/* Skip first token */
headerTokenizer.nextToken();
for (int i = 0; i < n; i++) {
String arrayName = headerTokenizer.nextToken();
String markAnn = markArrays.get(i);
String markAnn1 = markAnn.replace("\"", "");
String arrayName1 = arrayName
+ ": "
+markAnn1;
CSMicroarray array = new CSMicroarray(i, possibleMarkers,
arrayName1,
DSMicroarraySet.affyTxtType);
maSet.add(array);
if (maSet.size() != (i + 1)) {
log.info("We got a duplicate label of array");
array.setLabel(array.getLabel()
+ duplicateLabelModificator);
maSet.add(array);
}
}
while ((line != null)
&& (!StringUtils.isEmpty(line))
&& (!line.trim().startsWith(commentSign2))) {
String[] tokens = line.split(columnSeperator);
int length = tokens.length;
if (length != (n + 1)) {
log.error("Warning: Could not parse line #" + (m + 1)
+ ". Line should have " + (n + 1)
+ " lines, has " + length + ".");
if ((m == 0) && (length == n + 2))
throw new InputFileFormatException(
"Attempting to open a file that does not comply with the "
+ "SOFT file format."
+ "\n"
+ "Warning: Could not parse line #"
+ (m + 1)
+ ". Line should have "
+ (n + 1)
+ " columns, but it has "
+ length
+ ".\n"
+ "This file looks like R's SOFT format, which needs manually add a tab in the beginning of the header to make it a valid SOFT format.");
else
throw new InputFileFormatException(
"Attempting to open a file that does not comply with the "
+ "SOFT format." + "\n"
+ "Warning: Could not parse line #"
+ (m + 1) + ". Line should have "
+ (n + 1) + " columns, but it has "
+ length + ".");
}
String markerName = new String(tokens[0].trim());
CSExpressionMarker marker = new CSExpressionMarker(m);
marker.setLabel(markerName);
maSet.getMarkers().add(m, marker);
for (int i = 0; i < n; i++) {
String valString = "";
if ((i + 1) < tokens.length) {
valString = tokens[i + 1];
}
if (valString.trim().length() == 0) {
// put values directly into CSMicroarray inside of
// maSet
Float v = Float.NaN;
CSExpressionMarkerValue markerValue = new CSExpressionMarkerValue(v);
DSMicroarray microarray = (DSMicroarray)maSet.get(i);
microarray.setMarkerValue(m, markerValue);
if (v.isNaN()) {
markerValue.setMissing(true);
} else {
markerValue.setPresent();
}
} else {
float value = Float.NaN;
try {
value = Float.parseFloat(valString);
} catch (NumberFormatException nfe) {
}
// put values directly into CSMicroarray inside of
// maSet
Float v = value;
CSExpressionMarkerValue markerValue = new CSExpressionMarkerValue(
v);
try {
DSMicroarray microarray = (DSMicroarray)maSet.get(i);
microarray.setMarkerValue(m, markerValue);
} catch (IndexOutOfBoundsException ioobe) {
log.error("i=" + i + ", m=" + m);
}
if (v.isNaN()) {
markerValue.setMissing(true);
} else {
markerValue.setPresent();
}
}
}
m++;
line = in.readLine();
line = StringUtils.replace(line, "\"", "");
}
// Set chip-type
String result = null;
for (int i = 0; i < m; i++) {
result = AffyAnnotationUtil.matchAffyAnnotationFile(maSet);
if (result != null) {
break;
}
}
if (result == null) {
AffyAnnotationUtil.matchAffyAnnotationFile(maSet);
} else {
maSet.setCompatibilityLabel(result);
}
for (DSGeneMarker marker : maSet.getMarkers()) {
String token = marker.getLabel();
String[] locusResult = AnnotationParser.getInfo(token,
AnnotationParser.LOCUSLINK);
String locus = "";
if ((locusResult != null)
&& (!locusResult[0].trim().equals(""))) {
locus = locusResult[0].trim();
}
if (locus.compareTo("") != 0) {
try {
marker.setGeneId(Integer.parseInt(locus));
} catch (NumberFormatException e) {
log.info("Couldn't parse locus id: " + locus);
}
}
String[] geneNames = AnnotationParser.getInfo(token,
AnnotationParser.ABREV);
if (geneNames != null) {
marker.setGeneName(geneNames[0]);
}
marker.getUnigene().set(token);
}
}
} catch (FileNotFoundException e) {
e.printStackTrace();
return null;
} catch (IOException e) {
// TODO Auto-generated catch block
return null;
} finally {
try {
in.close();
} catch (IOException e) {
e.printStackTrace();
}
}
return maSet;
}
}
| true | true | public DSMicroarraySet getMArraySet(File file)
throws InputFileFormatException, InterruptedIOException {
final int extSeperater = '.';
if (!checkFormat(file)) {
log
.info("SOFTFileFormat::getMArraySet - "
+ "Attempting to open a file that does not comply with the "
+ "GEO SOFT file format.");
throw new InputFileFormatException(errorMessage);
}
String fileName = file.getName();
int dotIndex = fileName.lastIndexOf(extSeperater);
if (dotIndex != -1) {
fileName = fileName.substring(0, dotIndex);
}
maSet.setLabel(fileName);
BufferedReader in = null;
try {
in = new BufferedReader(new FileReader(file));
if (in != null) {
String header = in.readLine();
if (header == null) {
throw new InputFileFormatException("File is empty.");
}
while (header != null
&& (header.startsWith(commentSign1) || header
.startsWith(commentSign2) || header
.startsWith(commentSign3))
|| StringUtils.isEmpty(header)) {
header = in.readLine();
}
if (header == null) {
throw new InputFileFormatException(
"File is empty or consists of only comments.\n"
+ "SOFT File Format expected");
}
header = StringUtils.replace(header, "\"", "");
StringTokenizer headerTokenizer = new StringTokenizer(header,
columnSeperator, false);
int n = headerTokenizer.countTokens();
if (n <= 1) {
throw new InputFileFormatException(
"Attempting to open a file that does not comply with the SOFT File format.\n"
+ "Invalid header: " + header);
}
n -= 1;
String line = in.readLine();
line = StringUtils.replace(line, "\"", "");
int m = 0;
/* Skip first token */
headerTokenizer.nextToken();
for (int i = 0; i < n; i++) {
String arrayName = headerTokenizer.nextToken();
String markAnn = markArrays.get(i);
String markAnn1 = markAnn.replace("\"", "");
String arrayName1 = arrayName
+ ": "
+markAnn1;
CSMicroarray array = new CSMicroarray(i, possibleMarkers,
arrayName1,
DSMicroarraySet.affyTxtType);
maSet.add(array);
if (maSet.size() != (i + 1)) {
log.info("We got a duplicate label of array");
array.setLabel(array.getLabel()
+ duplicateLabelModificator);
maSet.add(array);
}
}
while ((line != null)
&& (!StringUtils.isEmpty(line))
&& (!line.trim().startsWith(commentSign2))) {
String[] tokens = line.split(columnSeperator);
int length = tokens.length;
if (length != (n + 1)) {
log.error("Warning: Could not parse line #" + (m + 1)
+ ". Line should have " + (n + 1)
+ " lines, has " + length + ".");
if ((m == 0) && (length == n + 2))
throw new InputFileFormatException(
"Attempting to open a file that does not comply with the "
+ "SOFT file format."
+ "\n"
+ "Warning: Could not parse line #"
+ (m + 1)
+ ". Line should have "
+ (n + 1)
+ " columns, but it has "
+ length
+ ".\n"
+ "This file looks like R's SOFT format, which needs manually add a tab in the beginning of the header to make it a valid SOFT format.");
else
throw new InputFileFormatException(
"Attempting to open a file that does not comply with the "
+ "SOFT format." + "\n"
+ "Warning: Could not parse line #"
+ (m + 1) + ". Line should have "
+ (n + 1) + " columns, but it has "
+ length + ".");
}
String markerName = new String(tokens[0].trim());
CSExpressionMarker marker = new CSExpressionMarker(m);
marker.setLabel(markerName);
maSet.getMarkers().add(m, marker);
for (int i = 0; i < n; i++) {
String valString = "";
if ((i + 1) < tokens.length) {
valString = tokens[i + 1];
}
if (valString.trim().length() == 0) {
// put values directly into CSMicroarray inside of
// maSet
Float v = Float.NaN;
CSExpressionMarkerValue markerValue = new CSExpressionMarkerValue(v);
DSMicroarray microarray = (DSMicroarray)maSet.get(i);
microarray.setMarkerValue(m, markerValue);
if (v.isNaN()) {
markerValue.setMissing(true);
} else {
markerValue.setPresent();
}
} else {
float value = Float.NaN;
try {
value = Float.parseFloat(valString);
} catch (NumberFormatException nfe) {
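							// Not a parseable number: keep the NaN default; the value is flagged missing below.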
}
// put values directly into CSMicroarray inside of
// maSet
Float v = value;
CSExpressionMarkerValue markerValue = new CSExpressionMarkerValue(
v);
try {
DSMicroarray microarray = (DSMicroarray)maSet.get(i);
microarray.setMarkerValue(m, markerValue);
} catch (IndexOutOfBoundsException ioobe) {
log.error("i=" + i + ", m=" + m);
}
if (v.isNaN()) {
markerValue.setMissing(true);
} else {
markerValue.setPresent();
}
}
}
m++;
line = in.readLine();
line = StringUtils.replace(line, "\"", "");
}
// Set chip-type
String result = null;
for (int i = 0; i < m; i++) {
result = AffyAnnotationUtil.matchAffyAnnotationFile(maSet);
if (result != null) {
break;
}
}
if (result == null) {
AffyAnnotationUtil.matchAffyAnnotationFile(maSet);
} else {
maSet.setCompatibilityLabel(result);
}
for (DSGeneMarker marker : maSet.getMarkers()) {
String token = marker.getLabel();
String[] locusResult = AnnotationParser.getInfo(token,
AnnotationParser.LOCUSLINK);
String locus = "";
if ((locusResult != null)
&& (!locusResult[0].trim().equals(""))) {
locus = locusResult[0].trim();
}
if (locus.compareTo("") != 0) {
try {
marker.setGeneId(Integer.parseInt(locus));
} catch (NumberFormatException e) {
log.info("Couldn't parse locus id: " + locus);
}
}
String[] geneNames = AnnotationParser.getInfo(token,
AnnotationParser.ABREV);
if (geneNames != null) {
marker.setGeneName(geneNames[0]);
}
marker.getUnigene().set(token);
}
}
} catch (FileNotFoundException e) {
e.printStackTrace();
return null;
} catch (IOException e) {
// I/O failure while reading the file; give up and return null.
return null;
} finally {
try {
if (in != null) {
	in.close();
}
} catch (IOException e) {
e.printStackTrace();
}
}
return maSet;
}
| public DSMicroarraySet getMArraySet(File file)
throws InputFileFormatException, InterruptedIOException {
final int extSeperater = '.';
if (!checkFormat(file)) {
log.info("SOFTFileFormat::getMArraySet - "
		+ "Attempting to open a file that does not comply with the "
		+ "GEO SOFT file format.");
throw new InputFileFormatException(errorMessage);
}
String fileName = file.getName();
maSet.setLabel(fileName);
BufferedReader in = null;
try {
in = new BufferedReader(new FileReader(file));
if (in != null) {
String header = in.readLine();
if (header == null) {
throw new InputFileFormatException("File is empty.");
}
while (header != null
&& (header.startsWith(commentSign1) || header
.startsWith(commentSign2) || header
.startsWith(commentSign3))
|| StringUtils.isEmpty(header)) {
header = in.readLine();
}
if (header == null) {
throw new InputFileFormatException(
"File is empty or consists of only comments.\n"
+ "SOFT File Format expected");
}
header = StringUtils.replace(header, "\"", "");
StringTokenizer headerTokenizer = new StringTokenizer(header,
columnSeperator, false);
int n = headerTokenizer.countTokens();
if (n <= 1) {
throw new InputFileFormatException(
"Attempting to open a file that does not comply with the SOFT File format.\n"
+ "Invalid header: " + header);
}
n -= 1;
String line = in.readLine();
line = StringUtils.replace(line, "\"", "");
int m = 0;
/* Skip first token */
headerTokenizer.nextToken();
for (int i = 0; i < n; i++) {
String arrayName = headerTokenizer.nextToken();
String markAnn = markArrays.get(i);
String markAnn1 = markAnn.replace("\"", "");
String arrayName1 = arrayName + ": " + markAnn1;
CSMicroarray array = new CSMicroarray(i, possibleMarkers,
arrayName1,
DSMicroarraySet.affyTxtType);
maSet.add(array);
if (maSet.size() != (i + 1)) {
log.info("We got a duplicate label of array");
array.setLabel(array.getLabel()
+ duplicateLabelModificator);
maSet.add(array);
}
}
while ((line != null)
&& (!StringUtils.isEmpty(line))
&& (!line.trim().startsWith(commentSign2))) {
String[] tokens = line.split(columnSeperator);
int length = tokens.length;
if (length != (n + 1)) {
log.error("Warning: Could not parse line #" + (m + 1)
+ ". Line should have " + (n + 1)
+ " lines, has " + length + ".");
if ((m == 0) && (length == n + 2))
throw new InputFileFormatException(
"Attempting to open a file that does not comply with the "
+ "SOFT file format."
+ "\n"
+ "Warning: Could not parse line #"
+ (m + 1)
+ ". Line should have "
+ (n + 1)
+ " columns, but it has "
+ length
+ ".\n"
+ "This file looks like R's SOFT format, which needs manually add a tab in the beginning of the header to make it a valid SOFT format.");
else
throw new InputFileFormatException(
"Attempting to open a file that does not comply with the "
+ "SOFT format." + "\n"
+ "Warning: Could not parse line #"
+ (m + 1) + ". Line should have "
+ (n + 1) + " columns, but it has "
+ length + ".");
}
String markerName = new String(tokens[0].trim());
CSExpressionMarker marker = new CSExpressionMarker(m);
marker.setLabel(markerName);
maSet.getMarkers().add(m, marker);
for (int i = 0; i < n; i++) {
String valString = "";
if ((i + 1) < tokens.length) {
valString = tokens[i + 1];
}
if (valString.trim().length() == 0) {
// put values directly into CSMicroarray inside of
// maSet
Float v = Float.NaN;
CSExpressionMarkerValue markerValue = new CSExpressionMarkerValue(v);
DSMicroarray microarray = (DSMicroarray)maSet.get(i);
microarray.setMarkerValue(m, markerValue);
if (v.isNaN()) {
markerValue.setMissing(true);
} else {
markerValue.setPresent();
}
} else {
float value = Float.NaN;
try {
value = Float.parseFloat(valString);
} catch (NumberFormatException nfe) {
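							// Not a parseable number: keep the NaN default; the value is flagged missing below.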
}
// put values directly into CSMicroarray inside of
// maSet
Float v = value;
CSExpressionMarkerValue markerValue = new CSExpressionMarkerValue(
v);
try {
DSMicroarray microarray = (DSMicroarray)maSet.get(i);
microarray.setMarkerValue(m, markerValue);
} catch (IndexOutOfBoundsException ioobe) {
log.error("i=" + i + ", m=" + m);
}
if (v.isNaN()) {
markerValue.setMissing(true);
} else {
markerValue.setPresent();
}
}
}
m++;
line = in.readLine();
line = StringUtils.replace(line, "\"", "");
}
// Set chip-type
String result = null;
for (int i = 0; i < m; i++) {
result = AffyAnnotationUtil.matchAffyAnnotationFile(maSet);
if (result != null) {
break;
}
}
if (result == null) {
AffyAnnotationUtil.matchAffyAnnotationFile(maSet);
} else {
maSet.setCompatibilityLabel(result);
}
for (DSGeneMarker marker : maSet.getMarkers()) {
String token = marker.getLabel();
String[] locusResult = AnnotationParser.getInfo(token,
AnnotationParser.LOCUSLINK);
String locus = "";
if ((locusResult != null)
&& (!locusResult[0].trim().equals(""))) {
locus = locusResult[0].trim();
}
if (locus.compareTo("") != 0) {
try {
marker.setGeneId(Integer.parseInt(locus));
} catch (NumberFormatException e) {
log.info("Couldn't parse locus id: " + locus);
}
}
String[] geneNames = AnnotationParser.getInfo(token,
AnnotationParser.ABREV);
if (geneNames != null) {
marker.setGeneName(geneNames[0]);
}
marker.getUnigene().set(token);
}
}
} catch (FileNotFoundException e) {
e.printStackTrace();
return null;
} catch (IOException e) {
// I/O failure while reading the file; give up and return null.
return null;
} finally {
try {
if (in != null) {
	in.close();
}
} catch (IOException e) {
e.printStackTrace();
}
}
return maSet;
}
|
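// A minimal sketch (hypothetical plain Java, not geWorkbench code) of the
// labeling change in getMArraySet above: the buggy version stripped the
// file extension before calling maSet.setLabel(...), while the fixed
// version keeps the full file name. Class and file names are invented.
import java.io.File;

public class SoftLabelSketch {

	// Buggy behavior: "GSE1000_series_matrix.soft" -> "GSE1000_series_matrix"
	static String buggyLabel(File file) {
		String fileName = file.getName();
		int dotIndex = fileName.lastIndexOf('.');
		if (dotIndex != -1) {
			fileName = fileName.substring(0, dotIndex);
		}
		return fileName;
	}

	// Fixed behavior: the label is the file name, extension included.
	static String fixedLabel(File file) {
		return file.getName();
	}

	public static void main(String[] args) {
		File f = new File("GSE1000_series_matrix.soft"); // hypothetical name
		System.out.println(buggyLabel(f)); // GSE1000_series_matrix
		System.out.println(fixedLabel(f)); // GSE1000_series_matrix.soft
	}
}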
diff --git a/src/com/android/settings/CryptKeeper.java b/src/com/android/settings/CryptKeeper.java
index 55435a031..3f35fa80e 100644
--- a/src/com/android/settings/CryptKeeper.java
+++ b/src/com/android/settings/CryptKeeper.java
@@ -1,595 +1,595 @@
/*
* Copyright (C) 2011 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.android.settings;
import android.app.Activity;
import android.app.StatusBarManager;
import android.content.ComponentName;
import android.content.Context;
import android.content.Intent;
import android.content.pm.PackageManager;
import android.os.AsyncTask;
import android.os.Bundle;
import android.os.Handler;
import android.os.IBinder;
import android.os.Message;
import android.os.PowerManager;
import android.os.RemoteException;
import android.os.ServiceManager;
import android.os.SystemProperties;
import android.os.storage.IMountService;
import android.telephony.TelephonyManager;
import android.text.TextUtils;
import android.util.Log;
import android.view.KeyEvent;
import android.view.View;
import android.view.View.OnClickListener;
import android.view.inputmethod.EditorInfo;
import android.view.inputmethod.InputMethodInfo;
import android.view.inputmethod.InputMethodManager;
import android.view.inputmethod.InputMethodSubtype;
import android.widget.Button;
import android.widget.EditText;
import android.widget.ProgressBar;
import android.widget.TextView;
import com.android.internal.telephony.ITelephony;
import java.util.List;
/**
* Settings screens to show the UI flows for encrypting/decrypting the device.
*
* This may be started via adb for debugging the UI layout, without having to go through
* encryption flows every time. It should be noted that starting the activity in this manner
* is only useful for verifying UI-correctness - the behavior will not be identical.
* <pre>
* $ adb shell pm enable com.android.settings/.CryptKeeper
* $ adb shell am start \
* -e "com.android.settings.CryptKeeper.DEBUG_FORCE_VIEW" "progress" \
* -n com.android.settings/.CryptKeeper
* </pre>
*/
public class CryptKeeper extends Activity implements TextView.OnEditorActionListener {
private static final String TAG = "CryptKeeper";
private static final String DECRYPT_STATE = "trigger_restart_framework";
private static final int UPDATE_PROGRESS = 1;
private static final int COOLDOWN = 2;
private static final int MAX_FAILED_ATTEMPTS = 30;
private static final int COOL_DOWN_ATTEMPTS = 10;
private static final int COOL_DOWN_INTERVAL = 30; // 30 seconds
// Intent action for launching the Emergency Dialer activity.
static final String ACTION_EMERGENCY_DIAL = "com.android.phone.EmergencyDialer.DIAL";
// Debug Intent extras so that this Activity may be started via adb for debugging UI layouts
private static final String EXTRA_FORCE_VIEW =
"com.android.settings.CryptKeeper.DEBUG_FORCE_VIEW";
private static final String FORCE_VIEW_PROGRESS = "progress";
private static final String FORCE_VIEW_ERROR = "error";
/** When encryption is detected, this flag indicates whether or not we've checked for errors. */
private boolean mValidationComplete;
private boolean mValidationRequested;
/** A flag to indicate that the volume is in a bad state (e.g. partially encrypted). */
private boolean mEncryptionGoneBad;
private int mCooldown;
PowerManager.WakeLock mWakeLock;
private EditText mPasswordEntry;
/**
* Used to propagate state through configuration changes (e.g. screen rotation)
*/
private static class NonConfigurationInstanceState {
final PowerManager.WakeLock wakelock;
NonConfigurationInstanceState(PowerManager.WakeLock _wakelock) {
wakelock = _wakelock;
}
}
/**
* Activity used to fade the screen to black after the password is entered.
*/
public static class FadeToBlack extends Activity {
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.crypt_keeper_blank);
}
}
private class DecryptTask extends AsyncTask<String, Void, Integer> {
@Override
protected Integer doInBackground(String... params) {
IMountService service = getMountService();
try {
return service.decryptStorage(params[0]);
} catch (Exception e) {
Log.e(TAG, "Error while decrypting...", e);
return -1;
}
}
@Override
protected void onPostExecute(Integer failedAttempts) {
if (failedAttempts == 0) {
// The password was entered successfully. Start the Blank activity
// so this activity animates to black before the device starts. Note:
// it has 1 second to complete the animation or it will be frozen
// until the boot animation comes back up.
Intent intent = new Intent(CryptKeeper.this, FadeToBlack.class);
finish();
startActivity(intent);
} else if (failedAttempts == MAX_FAILED_ATTEMPTS) {
// Factory reset the device.
sendBroadcast(new Intent("android.intent.action.MASTER_CLEAR"));
} else if ((failedAttempts % COOL_DOWN_ATTEMPTS) == 0) {
mCooldown = COOL_DOWN_INTERVAL;
cooldown();
} else {
final TextView status = (TextView) findViewById(R.id.status);
status.setText(R.string.try_again);
status.setVisibility(View.VISIBLE);
// Reenable the password entry
mPasswordEntry.setEnabled(true);
}
}
}
private class ValidationTask extends AsyncTask<Void, Void, Boolean> {
@Override
protected Boolean doInBackground(Void... params) {
IMountService service = getMountService();
try {
Log.d(TAG, "Validating encryption state.");
int state = service.getEncryptionState();
if (state == IMountService.ENCRYPTION_STATE_NONE) {
Log.w(TAG, "Unexpectedly in CryptKeeper even though there is no encryption.");
return true; // Unexpected, but fine, I guess...
}
return state == IMountService.ENCRYPTION_STATE_OK;
} catch (RemoteException e) {
Log.w(TAG, "Unable to get encryption state properly");
return true;
}
}
@Override
protected void onPostExecute(Boolean result) {
mValidationComplete = true;
if (Boolean.FALSE.equals(result)) {
Log.w(TAG, "Incomplete, or corrupted encryption detected. Prompting user to wipe.");
mEncryptionGoneBad = true;
} else {
Log.d(TAG, "Encryption state validated. Proceeding to configure UI");
}
setupUi();
}
}
private final Handler mHandler = new Handler() {
@Override
public void handleMessage(Message msg) {
switch (msg.what) {
case UPDATE_PROGRESS:
updateProgress();
break;
case COOLDOWN:
cooldown();
break;
}
}
};
/** @return whether or not this Activity was started for debugging the UI only. */
private boolean isDebugView() {
return getIntent().hasExtra(EXTRA_FORCE_VIEW);
}
/** @return whether or not this Activity was started for debugging the specific UI view only. */
private boolean isDebugView(String viewType /* non-nullable */) {
return viewType.equals(getIntent().getStringExtra(EXTRA_FORCE_VIEW));
}
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
// If we are not encrypted or encrypting, get out quickly.
String state = SystemProperties.get("vold.decrypt");
if (!isDebugView() && ("".equals(state) || DECRYPT_STATE.equals(state))) {
// Disable the crypt keeper.
PackageManager pm = getPackageManager();
ComponentName name = new ComponentName(this, CryptKeeper.class);
pm.setComponentEnabledSetting(name, PackageManager.COMPONENT_ENABLED_STATE_DISABLED,
PackageManager.DONT_KILL_APP);
// Typically CryptKeeper is launched as the home app. We don't
// want to be running, so we need to finish this activity. We can count
// on the activity manager re-launching the new home app upon finishing
// this one, since this will leave the activity stack empty.
// NOTE: This is really grungy. I think it would be better for the
// activity manager to explicitly launch the crypt keeper instead of
// home in the situation where we need to decrypt the device.
finish();
return;
}
- // Disable the status bar
+ // Disable the status bar, but do NOT disable back because the user needs a way to go
+ // from keyboard settings and back to the password screen.
StatusBarManager sbm = (StatusBarManager) getSystemService(Context.STATUS_BAR_SERVICE);
sbm.disable(StatusBarManager.DISABLE_EXPAND
| StatusBarManager.DISABLE_NOTIFICATION_ICONS
| StatusBarManager.DISABLE_NOTIFICATION_ALERTS
| StatusBarManager.DISABLE_SYSTEM_INFO
| StatusBarManager.DISABLE_HOME
- | StatusBarManager.DISABLE_RECENT
- | StatusBarManager.DISABLE_BACK);
+ | StatusBarManager.DISABLE_RECENT);
// Check for (and recover) retained instance data
Object lastInstance = getLastNonConfigurationInstance();
if (lastInstance instanceof NonConfigurationInstanceState) {
NonConfigurationInstanceState retained = (NonConfigurationInstanceState) lastInstance;
mWakeLock = retained.wakelock;
Log.d(TAG, "Restoring wakelock from NonConfigurationInstanceState");
}
}
/**
* Note, we defer the state check and screen setup to onStart() because this will be
* re-run if the user clicks the power button (sleeping/waking the screen), and this is
* especially important if we were to lose the wakelock for any reason.
*/
@Override
public void onStart() {
super.onStart();
setupUi();
}
/**
* Initializes the UI based on the current state of encryption.
* This is idempotent - calling repeatedly will simply re-initialize the UI.
*/
private void setupUi() {
if (mEncryptionGoneBad || isDebugView(FORCE_VIEW_ERROR)) {
setContentView(R.layout.crypt_keeper_progress);
showFactoryReset();
return;
}
String progress = SystemProperties.get("vold.encrypt_progress");
if (!"".equals(progress) || isDebugView(FORCE_VIEW_PROGRESS)) {
setContentView(R.layout.crypt_keeper_progress);
encryptionProgressInit();
} else if (mValidationComplete) {
setContentView(R.layout.crypt_keeper_password_entry);
passwordEntryInit();
} else if (!mValidationRequested) {
// We're supposed to be encrypted, but no validation has been done.
new ValidationTask().execute((Void[]) null);
mValidationRequested = true;
}
}
@Override
public void onStop() {
super.onStop();
mHandler.removeMessages(COOLDOWN);
mHandler.removeMessages(UPDATE_PROGRESS);
}
/**
* Reconfiguring, so propagate the wakelock to the next instance. This runs between onStop()
* and onDestroy() and only if we are changing configuration (e.g. rotation). Also clears
* mWakeLock so the subsequent call to onDestroy does not release it.
*/
@Override
public Object onRetainNonConfigurationInstance() {
NonConfigurationInstanceState state = new NonConfigurationInstanceState(mWakeLock);
Log.d(TAG, "Handing wakelock off to NonConfigurationInstanceState");
mWakeLock = null;
return state;
}
@Override
public void onDestroy() {
super.onDestroy();
if (mWakeLock != null) {
Log.d(TAG, "Releasing and destroying wakelock");
mWakeLock.release();
mWakeLock = null;
}
}
private void encryptionProgressInit() {
// Acquire a full wakelock to prevent the device from sleeping. Note
// we never release this wakelock as we will be restarted after the device
// is encrypted.
Log.d(TAG, "Encryption progress screen initializing.");
if (mWakeLock == null) {
Log.d(TAG, "Acquiring wakelock.");
PowerManager pm = (PowerManager) getSystemService(Context.POWER_SERVICE);
mWakeLock = pm.newWakeLock(PowerManager.FULL_WAKE_LOCK, TAG);
mWakeLock.acquire();
}
((ProgressBar) findViewById(R.id.progress_bar)).setIndeterminate(true);
updateProgress();
}
private void showFactoryReset() {
// Hide the encryption-bot to make room for the "factory reset" button
findViewById(R.id.encroid).setVisibility(View.GONE);
// Show the reset button, failure text, and a divider
final Button button = (Button) findViewById(R.id.factory_reset);
button.setVisibility(View.VISIBLE);
button.setOnClickListener(new OnClickListener() {
@Override
public void onClick(View v) {
// Factory reset the device.
sendBroadcast(new Intent("android.intent.action.MASTER_CLEAR"));
}
});
// Alert the user of the failure.
((TextView) findViewById(R.id.title)).setText(R.string.crypt_keeper_failed_title);
((TextView) findViewById(R.id.status)).setText(R.string.crypt_keeper_failed_summary);
final View view = findViewById(R.id.bottom_divider);
// TODO(viki): Why would the bottom divider be missing in certain layouts? Investigate.
if (view != null) {
view.setVisibility(View.VISIBLE);
}
}
private void updateProgress() {
final String state = SystemProperties.get("vold.encrypt_progress");
if ("error_partially_encrypted".equals(state)) {
showFactoryReset();
return;
}
int progress = 0;
try {
// Force a 50% progress state when debugging the view.
progress = isDebugView() ? 50 : Integer.parseInt(state);
} catch (Exception e) {
Log.w(TAG, "Error parsing progress: " + e.toString());
}
final CharSequence status = getText(R.string.crypt_keeper_setup_description);
Log.v(TAG, "Encryption progress: " + progress);
final TextView tv = (TextView) findViewById(R.id.status);
tv.setText(TextUtils.expandTemplate(status, Integer.toString(progress)));
// Check the progress every 5 seconds
mHandler.removeMessages(UPDATE_PROGRESS);
mHandler.sendEmptyMessageDelayed(UPDATE_PROGRESS, 5000);
}
private void cooldown() {
final TextView status = (TextView) findViewById(R.id.status);
if (mCooldown <= 0) {
// Re-enable the password entry
mPasswordEntry.setEnabled(true);
status.setVisibility(View.GONE);
} else {
CharSequence template = getText(R.string.crypt_keeper_cooldown);
status.setText(TextUtils.expandTemplate(template, Integer.toString(mCooldown)));
status.setVisibility(View.VISIBLE);
mCooldown--;
mHandler.removeMessages(COOLDOWN);
mHandler.sendEmptyMessageDelayed(COOLDOWN, 1000); // Tick every second
}
}
private void passwordEntryInit() {
mPasswordEntry = (EditText) findViewById(R.id.passwordEntry);
mPasswordEntry.setOnEditorActionListener(this);
mPasswordEntry.requestFocus();
final View imeSwitcher = findViewById(R.id.switch_ime_button);
final InputMethodManager imm = (InputMethodManager) getSystemService(
Context.INPUT_METHOD_SERVICE);
if (imeSwitcher != null && hasMultipleEnabledIMEsOrSubtypes(imm, false)) {
imeSwitcher.setVisibility(View.VISIBLE);
imeSwitcher.setOnClickListener(new OnClickListener() {
@Override
public void onClick(View v) {
imm.showInputMethodPicker();
}
});
}
// Asynchronously throw up the IME, since there are issues with requesting it to be shown
// immediately.
mHandler.postDelayed(new Runnable() {
@Override public void run() {
imm.showSoftInputUnchecked(0, null);
}
}, 0);
updateEmergencyCallButtonState();
}
/**
* Method adapted from com.android.inputmethod.latin.Utils
*
* @param imm The input method manager
* @param shouldIncludeAuxiliarySubtypes
* @return true if we have multiple IMEs to choose from
*/
private boolean hasMultipleEnabledIMEsOrSubtypes(InputMethodManager imm,
final boolean shouldIncludeAuxiliarySubtypes) {
final List<InputMethodInfo> enabledImis = imm.getEnabledInputMethodList();
// Number of the filtered IMEs
int filteredImisCount = 0;
for (InputMethodInfo imi : enabledImis) {
// We can return true immediately after we find two or more filtered IMEs.
if (filteredImisCount > 1) return true;
final List<InputMethodSubtype> subtypes =
imm.getEnabledInputMethodSubtypeList(imi, true);
// IMEs that have no subtypes should be counted.
if (subtypes.isEmpty()) {
++filteredImisCount;
continue;
}
int auxCount = 0;
for (InputMethodSubtype subtype : subtypes) {
if (subtype.isAuxiliary()) {
++auxCount;
}
}
final int nonAuxCount = subtypes.size() - auxCount;
// IMEs that have one or more non-auxiliary subtypes should be counted.
// If shouldIncludeAuxiliarySubtypes is true, IMEs that have two or more auxiliary
// subtypes should be counted as well.
if (nonAuxCount > 0 || (shouldIncludeAuxiliarySubtypes && auxCount > 1)) {
++filteredImisCount;
continue;
}
}
return filteredImisCount > 1
// imm.getEnabledInputMethodSubtypeList(null, false) will return the current IME's enabled
// input method subtype (The current IME should be LatinIME.)
|| imm.getEnabledInputMethodSubtypeList(null, false).size() > 1;
}
private IMountService getMountService() {
final IBinder service = ServiceManager.getService("mount");
if (service != null) {
return IMountService.Stub.asInterface(service);
}
return null;
}
@Override
public boolean onEditorAction(TextView v, int actionId, KeyEvent event) {
if (actionId == EditorInfo.IME_NULL || actionId == EditorInfo.IME_ACTION_DONE) {
// Get the password
final String password = v.getText().toString();
if (TextUtils.isEmpty(password)) {
return true;
}
// Now that we have the password, clear the password field.
v.setText(null);
// Disable the password entry while checking the password. It will
// either be re-enabled if the password was wrong or after the
// cooldown period.
mPasswordEntry.setEnabled(false);
Log.d(TAG, "Attempting to send command to decrypt");
new DecryptTask().execute(password);
return true;
}
return false;
}
/**
* Code to update the state of, and handle clicks from, the "Emergency call" button.
*
* This code is mostly duplicated from the corresponding code in
* LockPatternUtils and LockPatternKeyguardView under frameworks/base.
*/
private void updateEmergencyCallButtonState() {
final Button emergencyCall = (Button) findViewById(R.id.emergencyCallButton);
// The button isn't present at all in some configurations.
if (emergencyCall == null)
return;
if (isEmergencyCallCapable()) {
emergencyCall.setVisibility(View.VISIBLE);
emergencyCall.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
takeEmergencyCallAction();
}
});
} else {
emergencyCall.setVisibility(View.GONE);
return;
}
final int newState = TelephonyManager.getDefault().getCallState();
int textId;
if (newState == TelephonyManager.CALL_STATE_OFFHOOK) {
// Show "return to call" text and show phone icon
textId = R.string.cryptkeeper_return_to_call;
final int phoneCallIcon = R.drawable.stat_sys_phone_call;
emergencyCall.setCompoundDrawablesWithIntrinsicBounds(phoneCallIcon, 0, 0, 0);
} else {
textId = R.string.cryptkeeper_emergency_call;
final int emergencyIcon = R.drawable.ic_emergency;
emergencyCall.setCompoundDrawablesWithIntrinsicBounds(emergencyIcon, 0, 0, 0);
}
emergencyCall.setText(textId);
}
private boolean isEmergencyCallCapable() {
return getResources().getBoolean(com.android.internal.R.bool.config_voice_capable);
}
private void takeEmergencyCallAction() {
if (TelephonyManager.getDefault().getCallState() == TelephonyManager.CALL_STATE_OFFHOOK) {
resumeCall();
} else {
launchEmergencyDialer();
}
}
private void resumeCall() {
final ITelephony phone = ITelephony.Stub.asInterface(ServiceManager.checkService("phone"));
if (phone != null) {
try {
phone.showCallScreen();
} catch (RemoteException e) {
Log.e(TAG, "Error calling ITelephony service: " + e);
}
}
}
private void launchEmergencyDialer() {
final Intent intent = new Intent(ACTION_EMERGENCY_DIAL);
intent.setFlags(Intent.FLAG_ACTIVITY_NEW_TASK
| Intent.FLAG_ACTIVITY_EXCLUDE_FROM_RECENTS);
startActivity(intent);
}
}
| false | true | public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
// If we are not encrypted or encrypting, get out quickly.
String state = SystemProperties.get("vold.decrypt");
if (!isDebugView() && ("".equals(state) || DECRYPT_STATE.equals(state))) {
// Disable the crypt keeper.
PackageManager pm = getPackageManager();
ComponentName name = new ComponentName(this, CryptKeeper.class);
pm.setComponentEnabledSetting(name, PackageManager.COMPONENT_ENABLED_STATE_DISABLED,
PackageManager.DONT_KILL_APP);
// Typically CryptKeeper is launched as the home app. We don't
// want to be running, so we need to finish this activity. We can count
// on the activity manager re-launching the new home app upon finishing
// this one, since this will leave the activity stack empty.
// NOTE: This is really grungy. I think it would be better for the
// activity manager to explicitly launch the crypt keeper instead of
// home in the situation where we need to decrypt the device.
finish();
return;
}
// Disable the status bar
StatusBarManager sbm = (StatusBarManager) getSystemService(Context.STATUS_BAR_SERVICE);
sbm.disable(StatusBarManager.DISABLE_EXPAND
| StatusBarManager.DISABLE_NOTIFICATION_ICONS
| StatusBarManager.DISABLE_NOTIFICATION_ALERTS
| StatusBarManager.DISABLE_SYSTEM_INFO
| StatusBarManager.DISABLE_HOME
| StatusBarManager.DISABLE_RECENT
| StatusBarManager.DISABLE_BACK);
// Check for (and recover) retained instance data
Object lastInstance = getLastNonConfigurationInstance();
if (lastInstance instanceof NonConfigurationInstanceState) {
NonConfigurationInstanceState retained = (NonConfigurationInstanceState) lastInstance;
mWakeLock = retained.wakelock;
Log.d(TAG, "Restoring wakelock from NonConfigurationInstanceState");
}
}
| public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
// If we are not encrypted or encrypting, get out quickly.
String state = SystemProperties.get("vold.decrypt");
if (!isDebugView() && ("".equals(state) || DECRYPT_STATE.equals(state))) {
// Disable the crypt keeper.
PackageManager pm = getPackageManager();
ComponentName name = new ComponentName(this, CryptKeeper.class);
pm.setComponentEnabledSetting(name, PackageManager.COMPONENT_ENABLED_STATE_DISABLED,
PackageManager.DONT_KILL_APP);
// Typically CryptKeeper is launched as the home app. We don't
// want to be running, so we need to finish this activity. We can count
// on the activity manager re-launching the new home app upon finishing
// this one, since this will leave the activity stack empty.
// NOTE: This is really grungy. I think it would be better for the
// activity manager to explicitly launch the crypt keeper instead of
// home in the situation where we need to decrypt the device.
finish();
return;
}
// Disable the status bar, but do NOT disable back because the user needs a way to go
// from keyboard settings and back to the password screen.
StatusBarManager sbm = (StatusBarManager) getSystemService(Context.STATUS_BAR_SERVICE);
sbm.disable(StatusBarManager.DISABLE_EXPAND
| StatusBarManager.DISABLE_NOTIFICATION_ICONS
| StatusBarManager.DISABLE_NOTIFICATION_ALERTS
| StatusBarManager.DISABLE_SYSTEM_INFO
| StatusBarManager.DISABLE_HOME
| StatusBarManager.DISABLE_RECENT);
// Check for (and recover) retained instance data
Object lastInstance = getLastNonConfigurationInstance();
if (lastInstance instanceof NonConfigurationInstanceState) {
NonConfigurationInstanceState retained = (NonConfigurationInstanceState) lastInstance;
mWakeLock = retained.wakelock;
Log.d(TAG, "Restoring wakelock from NonConfigurationInstanceState");
}
}
|
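// A minimal sketch of the bit-mask change in the CryptKeeper fix above:
// the patch drops StatusBarManager.DISABLE_BACK from the disable mask so
// the user can navigate back from keyboard settings to the password
// screen. The constants below are hypothetical stand-ins for the
// Android-internal StatusBarManager values; only the flag arithmetic
// is the point.
public class DisableMaskSketch {

	static final int DISABLE_EXPAND              = 1 << 0; // assumed value
	static final int DISABLE_NOTIFICATION_ICONS  = 1 << 1; // assumed value
	static final int DISABLE_NOTIFICATION_ALERTS = 1 << 2; // assumed value
	static final int DISABLE_SYSTEM_INFO         = 1 << 3; // assumed value
	static final int DISABLE_HOME                = 1 << 4; // assumed value
	static final int DISABLE_RECENT              = 1 << 5; // assumed value
	static final int DISABLE_BACK                = 1 << 6; // assumed value

	public static void main(String[] args) {
		// Buggy mask: back navigation disabled along with everything else.
		int buggy = DISABLE_EXPAND | DISABLE_NOTIFICATION_ICONS
				| DISABLE_NOTIFICATION_ALERTS | DISABLE_SYSTEM_INFO
				| DISABLE_HOME | DISABLE_RECENT | DISABLE_BACK;
		// Fixed mask: identical, minus DISABLE_BACK.
		int fixed = buggy & ~DISABLE_BACK;
		System.out.println((buggy & DISABLE_BACK) != 0); // true
		System.out.println((fixed & DISABLE_BACK) != 0); // false
	}
}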
diff --git a/JavaSource/org/unitime/timetable/dataexchange/StudentEnrollmentImport.java b/JavaSource/org/unitime/timetable/dataexchange/StudentEnrollmentImport.java
index a7be6a1c..f51e7882 100644
--- a/JavaSource/org/unitime/timetable/dataexchange/StudentEnrollmentImport.java
+++ b/JavaSource/org/unitime/timetable/dataexchange/StudentEnrollmentImport.java
@@ -1,285 +1,284 @@
/*
* UniTime 3.2 (University Timetabling Application)
* Copyright (C) 2008-2009, UniTime LLC, and individual contributors
* as indicated by the @authors tag.
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along
* with this program. If not, see <http://www.gnu.org/licenses/>.
*
*/
package org.unitime.timetable.dataexchange;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Hashtable;
import java.util.Iterator;
import java.util.List;
import java.util.Set;
import org.dom4j.Element;
import org.unitime.timetable.ApplicationProperties;
import org.unitime.timetable.model.ChangeLog;
import org.unitime.timetable.model.Class_;
import org.unitime.timetable.model.CourseOffering;
import org.unitime.timetable.model.Exam;
import org.unitime.timetable.model.Session;
import org.unitime.timetable.model.Student;
import org.unitime.timetable.model.StudentClassEnrollment;
import org.unitime.timetable.model.StudentSectioningQueue;
import org.unitime.timetable.model.dao.StudentDAO;
import org.unitime.timetable.model.dao._RootDAO;
import org.unitime.timetable.test.UpdateExamConflicts;
public class StudentEnrollmentImport extends BaseImport {
public StudentEnrollmentImport() {
super();
}
@Override
public void loadXml(Element rootElement) throws Exception {
boolean trimLeadingZerosFromExternalId = "true".equals(ApplicationProperties.getProperty("tmtbl.data.exchange.trim.externalId","false"));
if (!rootElement.getName().equalsIgnoreCase("studentEnrollments"))
throw new Exception("Given XML file is not a Student Enrollments load file.");
Session session = null;
Set<Long> updatedStudents = new HashSet<Long>();
try {
String campus = rootElement.attributeValue("campus");
String year = rootElement.attributeValue("year");
String term = rootElement.attributeValue("term");
String created = rootElement.attributeValue("created");
beginTransaction();
session = Session.getSessionUsingInitiativeYearTerm(campus, year, term);
if(session == null)
throw new Exception("No session found for the given campus, year, and term.");
HashMap<String, Class_> extId2class = new HashMap<String, Class_>();
HashMap<String, Class_> name2class = new HashMap<String, Class_>();
HashMap<String, CourseOffering> extId2course = new HashMap<String, CourseOffering>();
HashMap<String, CourseOffering> name2course = new HashMap<String, CourseOffering>();
HashMap<String, CourseOffering> cextId2course = new HashMap<String, CourseOffering>();
HashMap<String, CourseOffering> cname2course = new HashMap<String, CourseOffering>();
HashMap<Long, Set<CourseOffering>> class2courses = new HashMap<Long, Set<CourseOffering>>();
for (Object[] o: (List<Object[]>)getHibSession().createQuery(
"select c, co from Class_ c inner join c.schedulingSubpart.instrOfferingConfig.instructionalOffering.courseOfferings co where " +
"c.schedulingSubpart.instrOfferingConfig.instructionalOffering.session.uniqueId = :sessionId")
.setLong("sessionId", session.getUniqueId()).list()) {
Class_ clazz = (Class_)o[0];
CourseOffering course = (CourseOffering)o[1];
String extId = clazz.getExternalId(course);
if (extId != null && !extId.isEmpty())
extId2class.put(extId, clazz);
String name = clazz.getClassLabel(course);
name2class.put(name, clazz);
name2course.put(name, course);
if (!extId2course.containsKey(extId) || course.isIsControl())
extId2course.put(extId, course);
Set<CourseOffering> courses = class2courses.get(clazz.getUniqueId());
if (course.getExternalUniqueId() != null && !course.getExternalUniqueId().isEmpty())
cextId2course.put(course.getExternalUniqueId(), course);
cname2course.put(course.getCourseName(), course);
if (courses == null) {
courses = new HashSet<CourseOffering>();
class2courses.put(clazz.getUniqueId(), courses);
}
courses.add(course);
}
debug("classes loaded");
if (created != null)
ChangeLog.addChange(getHibSession(), getManager(), session, session, created, ChangeLog.Source.DATA_IMPORT_STUDENT_ENROLLMENTS, ChangeLog.Operation.UPDATE, null, null);
Hashtable<String, Student> students = new Hashtable<String, Student>();
for (Student student: StudentDAO.getInstance().findBySession(getHibSession(), session.getUniqueId())) {
if (student.getExternalUniqueId() != null)
students.put(student.getExternalUniqueId(), student);
}
for (Iterator i = rootElement.elementIterator("student"); i.hasNext(); ) {
Element studentElement = (Element) i.next();
String externalId = studentElement.attributeValue("externalId");
if (externalId == null) continue;
while (trimLeadingZerosFromExternalId && externalId.startsWith("0")) externalId = externalId.substring(1);
Student student = students.remove(externalId);
if (student == null) {
student = new Student();
student.setSession(session);
student.setFirstName(studentElement.attributeValue("firstName", "Name"));
student.setMiddleName(studentElement.attributeValue("middleName"));
student.setLastName(studentElement.attributeValue("lastName", "Unknown"));
student.setEmail(studentElement.attributeValue("email"));
student.setExternalUniqueId(externalId);
student.setFreeTimeCategory(0);
student.setSchedulePreference(0);
student.setClassEnrollments(new HashSet<StudentClassEnrollment>());
}
Hashtable<Long, StudentClassEnrollment> enrollments = new Hashtable<Long, StudentClassEnrollment>();
for (StudentClassEnrollment enrollment: student.getClassEnrollments()) {
enrollments.put(enrollment.getClazz().getUniqueId(), enrollment);
}
for (Iterator j = studentElement.elementIterator("class"); j.hasNext(); ) {
Element classElement = (Element) j.next();
Class_ clazz = null;
CourseOffering course = null;
String classExternalId = classElement.attributeValue("externalId");
if (classExternalId != null) {
clazz = extId2class.get(classExternalId);
course = extId2course.get(classExternalId);
if (clazz == null) {
clazz = name2class.get(classExternalId);
course = name2course.get(classExternalId);
}
}
if (clazz == null && classElement.attributeValue("name") != null) {
String className = classElement.attributeValue("name");
clazz = name2class.get(className);
course = name2course.get(className);
}
String courseName = classElement.attributeValue("course");
if (courseName != null) {
course = cname2course.get(courseName);
} else {
String subject = classElement.attributeValue("subject");
String courseNbr = classElement.attributeValue("courseNbr");
if (subject != null && courseNbr != null)
course = cname2course.get(subject + " " + courseNbr);
}
if (course != null && clazz == null) {
String type = classElement.attributeValue("type");
String suffix = classElement.attributeValue("suffix");
if (type != null && suffix != null)
clazz = name2class.get(course.getCourseName() + " " + type.trim() + " " + suffix);
}
if (clazz == null) {
warn("Class " + (classExternalId != null ? classExternalId : classElement.attributeValue("name",
classElement.attributeValue("course", classElement.attributeValue("subject") + " " + classElement.attributeValue("courseNbr")) + " " +
classElement.attributeValue("type") + " " + classElement.attributeValue("suffix"))) + " not found.");
continue;
}
Set<CourseOffering> courses = class2courses.get(clazz.getUniqueId());
if (course == null || !courses.contains(course)) {
for (CourseOffering co: courses)
if (co.isIsControl())
{ course = co; break; }
}
StudentClassEnrollment enrollment = enrollments.remove(clazz.getUniqueId());
if (enrollment != null) continue; // enrollment already exists
enrollment = new StudentClassEnrollment();
enrollment.setStudent(student);
enrollment.setClazz(clazz);
enrollment.setCourseOffering(course);
enrollment.setTimestamp(new java.util.Date());
student.getClassEnrollments().add(enrollment);
if (student.getUniqueId() != null) updatedStudents.add(student.getUniqueId());
}
if (!enrollments.isEmpty()) {
for (StudentClassEnrollment enrollment: enrollments.values()) {
student.getClassEnrollments().remove(enrollment);
getHibSession().delete(enrollment);
updatedStudents.add(student.getUniqueId());
}
}
if (student.getUniqueId() == null) {
updatedStudents.add((Long)getHibSession().save(student));
} else {
getHibSession().update(student);
}
- flushIfNeeded(true);
}
for (Student student: students.values()) {
for (Iterator<StudentClassEnrollment> i = student.getClassEnrollments().iterator(); i.hasNext(); ) {
StudentClassEnrollment enrollment = i.next();
getHibSession().delete(enrollment);
i.remove();
updatedStudents.add(student.getUniqueId());
}
getHibSession().update(student);
}
if (!updatedStudents.isEmpty())
StudentSectioningQueue.studentChanged(getHibSession(), session.getUniqueId(), updatedStudents);
commitTransaction();
debug(updatedStudents.size() + " students changed");
} catch (Exception e) {
fatal("Exception: " + e.getMessage(), e);
rollbackTransaction();
throw e;
}
if (session!=null && "true".equals(ApplicationProperties.getProperty("tmtbl.data.import.studentEnrl.finalExam.updateConflicts","false"))) {
try {
beginTransaction();
new UpdateExamConflicts(this).update(session.getUniqueId(), Exam.sExamTypeFinal, getHibSession());
commitTransaction();
} catch (Exception e) {
fatal("Exception: " + e.getMessage(), e);
rollbackTransaction();
}
}
if (session!=null && "true".equals(ApplicationProperties.getProperty("tmtbl.data.import.studentEnrl.midtermExam.updateConflicts","false"))) {
try {
beginTransaction();
new UpdateExamConflicts(this).update(session.getUniqueId(), Exam.sExamTypeMidterm, getHibSession());
commitTransaction();
} catch (Exception e) {
fatal("Exception: " + e.getMessage(), e);
rollbackTransaction();
}
}
if (session != null && "true".equals(ApplicationProperties.getProperty("tmtbl.data.import.studentEnrl.class.updateEnrollments","true"))){
org.hibernate.Session hibSession = new _RootDAO().createNewSession();
try {
info(" Updating class enrollments...");
Class_.updateClassEnrollmentForSession(session, hibSession);
info(" Updating course offering enrollments...");
CourseOffering.updateCourseOfferingEnrollmentForSession(session, hibSession);
} catch (Exception e) {
fatal("Exception: " + e.getMessage(), e);
} finally {
hibSession.close();
}
}
}
}
| true | true | public void loadXml(Element rootElement) throws Exception {
boolean trimLeadingZerosFromExternalId = "true".equals(ApplicationProperties.getProperty("tmtbl.data.exchange.trim.externalId","false"));
if (!rootElement.getName().equalsIgnoreCase("studentEnrollments"))
throw new Exception("Given XML file is not a Student Enrollments load file.");
Session session = null;
Set<Long> updatedStudents = new HashSet<Long>();
try {
String campus = rootElement.attributeValue("campus");
String year = rootElement.attributeValue("year");
String term = rootElement.attributeValue("term");
String created = rootElement.attributeValue("created");
beginTransaction();
session = Session.getSessionUsingInitiativeYearTerm(campus, year, term);
if(session == null)
throw new Exception("No session found for the given campus, year, and term.");
HashMap<String, Class_> extId2class = new HashMap<String, Class_>();
HashMap<String, Class_> name2class = new HashMap<String, Class_>();
HashMap<String, CourseOffering> extId2course = new HashMap<String, CourseOffering>();
HashMap<String, CourseOffering> name2course = new HashMap<String, CourseOffering>();
HashMap<String, CourseOffering> cextId2course = new HashMap<String, CourseOffering>();
HashMap<String, CourseOffering> cname2course = new HashMap<String, CourseOffering>();
HashMap<Long, Set<CourseOffering>> class2courses = new HashMap<Long, Set<CourseOffering>>();
for (Object[] o: (List<Object[]>)getHibSession().createQuery(
"select c, co from Class_ c inner join c.schedulingSubpart.instrOfferingConfig.instructionalOffering.courseOfferings co where " +
"c.schedulingSubpart.instrOfferingConfig.instructionalOffering.session.uniqueId = :sessionId")
.setLong("sessionId", session.getUniqueId()).list()) {
Class_ clazz = (Class_)o[0];
CourseOffering course = (CourseOffering)o[1];
String extId = clazz.getExternalId(course);
if (extId != null && !extId.isEmpty())
extId2class.put(extId, clazz);
String name = clazz.getClassLabel(course);
name2class.put(name, clazz);
name2course.put(name, course);
if (!extId2course.containsKey(extId) || course.isIsControl())
extId2course.put(extId, course);
Set<CourseOffering> courses = class2courses.get(clazz.getUniqueId());
if (course.getExternalUniqueId() != null && !course.getExternalUniqueId().isEmpty())
cextId2course.put(course.getExternalUniqueId(), course);
cname2course.put(course.getCourseName(), course);
if (courses == null) {
courses = new HashSet<CourseOffering>();
class2courses.put(clazz.getUniqueId(), courses);
}
courses.add(course);
}
debug("classes loaded");
if (created != null)
ChangeLog.addChange(getHibSession(), getManager(), session, session, created, ChangeLog.Source.DATA_IMPORT_STUDENT_ENROLLMENTS, ChangeLog.Operation.UPDATE, null, null);
Hashtable<String, Student> students = new Hashtable<String, Student>();
for (Student student: StudentDAO.getInstance().findBySession(getHibSession(), session.getUniqueId())) {
if (student.getExternalUniqueId() != null)
students.put(student.getExternalUniqueId(), student);
}
for (Iterator i = rootElement.elementIterator("student"); i.hasNext(); ) {
Element studentElement = (Element) i.next();
String externalId = studentElement.attributeValue("externalId");
if (externalId == null) continue;
while (trimLeadingZerosFromExternalId && externalId.startsWith("0")) externalId = externalId.substring(1);
Student student = students.remove(externalId);
if (student == null) {
student = new Student();
student.setSession(session);
student.setFirstName(studentElement.attributeValue("firstName", "Name"));
student.setMiddleName(studentElement.attributeValue("middleName"));
student.setLastName(studentElement.attributeValue("lastName", "Unknown"));
student.setEmail(studentElement.attributeValue("email"));
student.setExternalUniqueId(externalId);
student.setFreeTimeCategory(0);
student.setSchedulePreference(0);
student.setClassEnrollments(new HashSet<StudentClassEnrollment>());
}
Hashtable<Long, StudentClassEnrollment> enrollments = new Hashtable<Long, StudentClassEnrollment>();
for (StudentClassEnrollment enrollment: student.getClassEnrollments()) {
enrollments.put(enrollment.getClazz().getUniqueId(), enrollment);
}
for (Iterator j = studentElement.elementIterator("class"); j.hasNext(); ) {
Element classElement = (Element) j.next();
Class_ clazz = null;
CourseOffering course = null;
String classExternalId = classElement.attributeValue("externalId");
if (classExternalId != null) {
clazz = extId2class.get(classExternalId);
course = extId2course.get(classExternalId);
if (clazz == null) {
clazz = name2class.get(classExternalId);
course = name2course.get(classExternalId);
}
}
if (clazz == null && classElement.attributeValue("name") != null) {
String className = classElement.attributeValue("name");
clazz = name2class.get(className);
course = name2course.get(className);
}
String courseName = classElement.attributeValue("course");
if (courseName != null) {
course = cname2course.get(courseName);
} else {
String subject = classElement.attributeValue("subject");
String courseNbr = classElement.attributeValue("courseNbr");
if (subject != null && courseNbr != null)
course = cname2course.get(subject + " " + courseNbr);
}
if (course != null && clazz == null) {
String type = classElement.attributeValue("type");
String suffix = classElement.attributeValue("suffix");
if (type != null && suffix != null)
clazz = name2class.get(course.getCourseName() + " " + type.trim() + " " + suffix);
}
if (clazz == null) {
warn("Class " + (classExternalId != null ? classExternalId : classElement.attributeValue("name",
classElement.attributeValue("course", classElement.attributeValue("subject") + " " + classElement.attributeValue("courseNbr")) + " " +
classElement.attributeValue("type") + " " + classElement.attributeValue("suffix"))) + " not found.");
continue;
}
Set<CourseOffering> courses = class2courses.get(clazz.getUniqueId());
if (course == null || !courses.contains(course)) {
for (CourseOffering co: courses)
if (co.isIsControl())
{ course = co; break; }
}
StudentClassEnrollment enrollment = enrollments.remove(clazz.getUniqueId());
if (enrollment != null) continue; // enrollment already exists
enrollment = new StudentClassEnrollment();
enrollment.setStudent(student);
enrollment.setClazz(clazz);
enrollment.setCourseOffering(course);
enrollment.setTimestamp(new java.util.Date());
student.getClassEnrollments().add(enrollment);
if (student.getUniqueId() != null) updatedStudents.add(student.getUniqueId());
}
if (!enrollments.isEmpty()) {
for (StudentClassEnrollment enrollment: enrollments.values()) {
student.getClassEnrollments().remove(enrollment);
getHibSession().delete(enrollment);
updatedStudents.add(student.getUniqueId());
}
}
if (student.getUniqueId() == null) {
updatedStudents.add((Long)getHibSession().save(student));
} else {
getHibSession().update(student);
}
flushIfNeeded(true);
}
for (Student student: students.values()) {
for (Iterator<StudentClassEnrollment> i = student.getClassEnrollments().iterator(); i.hasNext(); ) {
StudentClassEnrollment enrollment = i.next();
getHibSession().delete(enrollment);
i.remove();
updatedStudents.add(student.getUniqueId());
}
getHibSession().update(student);
}
if (!updatedStudents.isEmpty())
StudentSectioningQueue.studentChanged(getHibSession(), session.getUniqueId(), updatedStudents);
commitTransaction();
debug(updatedStudents.size() + " students changed");
} catch (Exception e) {
fatal("Exception: " + e.getMessage(), e);
rollbackTransaction();
throw e;
}
if (session!=null && "true".equals(ApplicationProperties.getProperty("tmtbl.data.import.studentEnrl.finalExam.updateConflicts","false"))) {
try {
beginTransaction();
new UpdateExamConflicts(this).update(session.getUniqueId(), Exam.sExamTypeFinal, getHibSession());
commitTransaction();
} catch (Exception e) {
fatal("Exception: " + e.getMessage(), e);
rollbackTransaction();
}
}
if (session!=null && "true".equals(ApplicationProperties.getProperty("tmtbl.data.import.studentEnrl.midtermExam.updateConflicts","false"))) {
try {
beginTransaction();
new UpdateExamConflicts(this).update(session.getUniqueId(), Exam.sExamTypeMidterm, getHibSession());
commitTransaction();
} catch (Exception e) {
fatal("Exception: " + e.getMessage(), e);
rollbackTransaction();
}
}
if (session != null && "true".equals(ApplicationProperties.getProperty("tmtbl.data.import.studentEnrl.class.updateEnrollments","true"))){
org.hibernate.Session hibSession = new _RootDAO().createNewSession();
try {
info(" Updating class enrollments...");
Class_.updateClassEnrollmentForSession(session, hibSession);
info(" Updating course offering enrollments...");
CourseOffering.updateCourseOfferingEnrollmentForSession(session, hibSession);
} catch (Exception e) {
fatal("Exception: " + e.getMessage(), e);
} finally {
hibSession.close();
}
}
}
| public void loadXml(Element rootElement) throws Exception {
boolean trimLeadingZerosFromExternalId = "true".equals(ApplicationProperties.getProperty("tmtbl.data.exchange.trim.externalId","false"));
if (!rootElement.getName().equalsIgnoreCase("studentEnrollments"))
throw new Exception("Given XML file is not a Student Enrollments load file.");
Session session = null;
Set<Long> updatedStudents = new HashSet<Long>();
try {
String campus = rootElement.attributeValue("campus");
String year = rootElement.attributeValue("year");
String term = rootElement.attributeValue("term");
String created = rootElement.attributeValue("created");
beginTransaction();
session = Session.getSessionUsingInitiativeYearTerm(campus, year, term);
if(session == null)
throw new Exception("No session found for the given campus, year, and term.");
HashMap<String, Class_> extId2class = new HashMap<String, Class_>();
HashMap<String, Class_> name2class = new HashMap<String, Class_>();
HashMap<String, CourseOffering> extId2course = new HashMap<String, CourseOffering>();
HashMap<String, CourseOffering> name2course = new HashMap<String, CourseOffering>();
HashMap<String, CourseOffering> cextId2course = new HashMap<String, CourseOffering>();
HashMap<String, CourseOffering> cname2course = new HashMap<String, CourseOffering>();
HashMap<Long, Set<CourseOffering>> class2courses = new HashMap<Long, Set<CourseOffering>>();
for (Object[] o: (List<Object[]>)getHibSession().createQuery(
"select c, co from Class_ c inner join c.schedulingSubpart.instrOfferingConfig.instructionalOffering.courseOfferings co where " +
"c.schedulingSubpart.instrOfferingConfig.instructionalOffering.session.uniqueId = :sessionId")
.setLong("sessionId", session.getUniqueId()).list()) {
Class_ clazz = (Class_)o[0];
CourseOffering course = (CourseOffering)o[1];
String extId = clazz.getExternalId(course);
if (extId != null && !extId.isEmpty())
extId2class.put(extId, clazz);
String name = clazz.getClassLabel(course);
name2class.put(name, clazz);
name2course.put(name, course);
if (!extId2course.containsKey(extId) || course.isIsControl())
extId2course.put(extId, course);
Set<CourseOffering> courses = class2courses.get(clazz.getUniqueId());
if (course.getExternalUniqueId() != null && !course.getExternalUniqueId().isEmpty())
cextId2course.put(course.getExternalUniqueId(), course);
cname2course.put(course.getCourseName(), course);
if (courses == null) {
courses = new HashSet<CourseOffering>();
class2courses.put(clazz.getUniqueId(), courses);
}
courses.add(course);
}
debug("classes loaded");
if (created != null)
ChangeLog.addChange(getHibSession(), getManager(), session, session, created, ChangeLog.Source.DATA_IMPORT_STUDENT_ENROLLMENTS, ChangeLog.Operation.UPDATE, null, null);
Hashtable<String, Student> students = new Hashtable<String, Student>();
for (Student student: StudentDAO.getInstance().findBySession(getHibSession(), session.getUniqueId())) {
if (student.getExternalUniqueId() != null)
students.put(student.getExternalUniqueId(), student);
}
for (Iterator i = rootElement.elementIterator("student"); i.hasNext(); ) {
Element studentElement = (Element) i.next();
String externalId = studentElement.attributeValue("externalId");
if (externalId == null) continue;
while (trimLeadingZerosFromExternalId && externalId.startsWith("0")) externalId = externalId.substring(1);
Student student = students.remove(externalId);
if (student == null) {
student = new Student();
student.setSession(session);
student.setFirstName(studentElement.attributeValue("firstName", "Name"));
student.setMiddleName(studentElement.attributeValue("middleName"));
student.setLastName(studentElement.attributeValue("lastName", "Unknown"));
student.setEmail(studentElement.attributeValue("email"));
student.setExternalUniqueId(externalId);
student.setFreeTimeCategory(0);
student.setSchedulePreference(0);
student.setClassEnrollments(new HashSet<StudentClassEnrollment>());
}
Hashtable<Long, StudentClassEnrollment> enrollments = new Hashtable<Long, StudentClassEnrollment>();
for (StudentClassEnrollment enrollment: student.getClassEnrollments()) {
enrollments.put(enrollment.getClazz().getUniqueId(), enrollment);
}
for (Iterator j = studentElement.elementIterator("class"); j.hasNext(); ) {
Element classElement = (Element) j.next();
Class_ clazz = null;
CourseOffering course = null;
String classExternalId = classElement.attributeValue("externalId");
if (classExternalId != null) {
clazz = extId2class.get(classExternalId);
course = extId2course.get(classExternalId);
if (clazz == null) {
clazz = name2class.get(classExternalId);
course = name2course.get(classExternalId);
}
}
if (clazz == null && classElement.attributeValue("name") != null) {
String className = classElement.attributeValue("name");
clazz = name2class.get(className);
course = name2course.get(className);
}
String courseName = classElement.attributeValue("course");
if (courseName != null) {
course = cname2course.get(courseName);
} else {
String subject = classElement.attributeValue("subject");
String courseNbr = classElement.attributeValue("courseNbr");
if (subject != null && courseNbr != null)
course = cname2course.get(subject + " " + courseNbr);
}
if (course != null && clazz == null) {
String type = classElement.attributeValue("type");
String suffix = classElement.attributeValue("suffix");
if (type != null && suffix != null)
clazz = name2class.get(course.getCourseName() + " " + type.trim() + " " + suffix);
}
if (clazz == null) {
warn("Class " + (classExternalId != null ? classExternalId : classElement.attributeValue("name",
classElement.attributeValue("course", classElement.attributeValue("subject") + " " + classElement.attributeValue("courseNbr")) + " " +
classElement.attributeValue("type") + " " + classElement.attributeValue("suffix"))) + " not found.");
continue;
}
Set<CourseOffering> courses = class2courses.get(clazz.getUniqueId());
if (course == null || !courses.contains(course)) {
for (CourseOffering co: courses) {
    if (co.isIsControl()) {
        course = co;
        break;
    }
}
}
StudentClassEnrollment enrollment = enrollments.remove(clazz.getUniqueId());
if (enrollment != null) continue; // enrollment already exists
enrollment = new StudentClassEnrollment();
enrollment.setStudent(student);
enrollment.setClazz(clazz);
enrollment.setCourseOffering(course);
enrollment.setTimestamp(new java.util.Date());
student.getClassEnrollments().add(enrollment);
if (student.getUniqueId() != null) updatedStudents.add(student.getUniqueId());
}
if (!enrollments.isEmpty()) {
for (StudentClassEnrollment enrollment: enrollments.values()) {
student.getClassEnrollments().remove(enrollment);
getHibSession().delete(enrollment);
updatedStudents.add(student.getUniqueId());
}
}
if (student.getUniqueId() == null) {
updatedStudents.add((Long)getHibSession().save(student));
} else {
getHibSession().update(student);
}
}
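// any students still left in the map were absent from the import file: drop their enrollments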
for (Student student: students.values()) {
for (Iterator<StudentClassEnrollment> i = student.getClassEnrollments().iterator(); i.hasNext(); ) {
StudentClassEnrollment enrollment = i.next();
getHibSession().delete(enrollment);
i.remove();
updatedStudents.add(student.getUniqueId());
}
getHibSession().update(student);
}
if (!updatedStudents.isEmpty())
StudentSectioningQueue.studentChanged(getHibSession(), session.getUniqueId(), updatedStudents);
commitTransaction();
debug(updatedStudents.size() + " students changed");
} catch (Exception e) {
fatal("Exception: " + e.getMessage(), e);
rollbackTransaction();
throw e;
}
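// optional post-import steps below, each gated by a tmtbl.data.import.studentEnrl.* application property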
if (session != null && "true".equals(ApplicationProperties.getProperty("tmtbl.data.import.studentEnrl.finalExam.updateConflicts","false"))) {
try {
beginTransaction();
new UpdateExamConflicts(this).update(session.getUniqueId(), Exam.sExamTypeFinal, getHibSession());
commitTransaction();
} catch (Exception e) {
fatal("Exception: " + e.getMessage(), e);
rollbackTransaction();
}
}
if (session != null && "true".equals(ApplicationProperties.getProperty("tmtbl.data.import.studentEnrl.midtermExam.updateConflicts","false"))) {
try {
beginTransaction();
new UpdateExamConflicts(this).update(session.getUniqueId(), Exam.sExamTypeMidterm, getHibSession());
commitTransaction();
} catch (Exception e) {
fatal("Exception: " + e.getMessage(), e);
rollbackTransaction();
}
}
if (session != null && "true".equals(ApplicationProperties.getProperty("tmtbl.data.import.studentEnrl.class.updateEnrollments","true"))) {
org.hibernate.Session hibSession = new _RootDAO().createNewSession();
try {
info(" Updating class enrollments...");
Class_.updateClassEnrollmentForSession(session, hibSession);
info(" Updating course offering enrollments...");
CourseOffering.updateCourseOfferingEnrollmentForSession(session, hibSession);
} catch (Exception e) {
fatal("Exception: " + e.getMessage(), e);
} finally {
hibSession.close();
}
}
}
|
diff --git a/modules/org.pathvisio.desktop/src/org/pathvisio/desktop/GuiMain.java b/modules/org.pathvisio.desktop/src/org/pathvisio/desktop/GuiMain.java
index 846addaa..eb34a3da 100644
--- a/modules/org.pathvisio.desktop/src/org/pathvisio/desktop/GuiMain.java
+++ b/modules/org.pathvisio.desktop/src/org/pathvisio/desktop/GuiMain.java
@@ -1,376 +1,376 @@
// PathVisio,
// a tool for data visualization and analysis using Biological Pathways
// Copyright 2006-2009 BiGCaT Bioinformatics
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
package org.pathvisio.desktop;
import java.awt.BorderLayout;
import java.awt.Dimension;
import java.awt.Point;
import java.awt.event.WindowAdapter;
import java.awt.event.WindowEvent;
import java.io.File;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.Locale;
import javax.swing.BoxLayout;
import javax.swing.JFrame;
import javax.swing.JLabel;
import javax.swing.JPanel;
import javax.swing.UIManager;
import org.bridgedb.IDMapperException;
import org.osgi.framework.Bundle;
import org.osgi.framework.BundleException;
import org.pathvisio.core.Engine;
import org.pathvisio.core.Globals;
import org.pathvisio.core.data.GdbEvent;
import org.pathvisio.core.data.GdbManager;
import org.pathvisio.core.data.GdbManager.GdbEventListener;
import org.pathvisio.core.debug.Logger;
import org.pathvisio.core.model.DataNodeListExporter;
import org.pathvisio.core.model.EUGeneExporter;
import org.pathvisio.core.model.GpmlFormat;
import org.pathvisio.core.model.ImageExporter;
import org.pathvisio.core.model.MappFormat;
import org.pathvisio.core.preferences.GlobalPreference;
import org.pathvisio.core.preferences.PreferenceManager;
import org.pathvisio.core.view.MIMShapes;
import org.pathvisio.desktop.gex.GexManager;
import org.pathvisio.desktop.gex.SimpleGex;
import org.pathvisio.desktop.gex.GexManager.GexManagerEvent;
import org.pathvisio.desktop.gex.GexManager.GexManagerListener;
import org.pathvisio.desktop.model.BatikImageWithDataExporter;
import org.pathvisio.desktop.model.RasterImageWithDataExporter;
import org.pathvisio.desktop.visualization.VisualizationManager;
import org.pathvisio.gui.MainPanel;
import org.pathvisio.gui.SwingEngine;
import org.pathvisio.gui.SwingEngine.Browser;
import edu.stanford.ejalbert.BrowserLauncher;
/**
* Main class for the Swing GUI. This class creates and shows the GUI.
* Subclasses may override {@link #createAndShowGUI(MainPanelStandalone, SwingEngine)} to perform custom
* actions before showing the GUI.
* @author thomas
*
*/
public class GuiMain implements GdbEventListener, GexManagerListener
{
GuiMain() {
}
private MainPanelStandalone mainPanel;
private PvDesktop pvDesktop;
private SwingEngine swingEngine;
public AutoSave auto; // needs to be here for the same timer to be available always
private static void initLog(Engine engine)
{
String logDest = PreferenceManager.getCurrent().get(GlobalPreference.FILE_LOG);
Logger.log.setDest (logDest);
Logger.log.setLogLevel(true, true, true, true, true, true); // Modify this to adjust the log level
Logger.log.info("Application name: " + engine.getApplicationName() +
" revision: " + Engine.getRevision());
Logger.log.info("os.name: " + System.getProperty("os.name") +
" os.version: " + System.getProperty("os.version") +
" java.version: " + System.getProperty ("java.version"));
Logger.log.info ("Locale: " + Locale.getDefault().getDisplayName());
}
private void openPathwayFile(String pathwayFile) {
File f = new File(pathwayFile);
URL url;
//Assume the argument is a file
if(f.exists()) {
swingEngine.openPathway(f);
} else {
//If it doesn't exist, assume it's a URL
try {
url = new URL(pathwayFile);
swingEngine.openPathway(url);
} catch(MalformedURLException e) {
Logger.log.error("Couldn't open pathway url " + pathwayFile);
}
}
}
// this is only a workaround to hand over the pathway and pgex file
// from the command line when using the launcher
// TODO: find better solution
public static final String ARG_PROPERTY_PGEX = "pathvisio.pgex";
public static final String ARG_PROPERTY_PATHWAYFILE = "pathvisio.pathwayfile";
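// Usage sketch (illustrative; the jar name below is an assumption, not taken from this file):
//   java -Dpathvisio.pathwayfile=/path/to/pathway.gpml -Dpathvisio.pgex=/path/to/data.pgex -jar pathvisio.jar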
/**
* Act upon the command line arguments
*/
public void processOptions() {
//Create a plugin manager that loads the plugins
pvDesktop.initPlugins();
String str = System.getProperty(ARG_PROPERTY_PATHWAYFILE);
if (str != null) {
openPathwayFile(str);
}
str = System.getProperty(ARG_PROPERTY_PGEX);
if(str != null) {
try {
pvDesktop.getGexManager().setCurrentGex(str, false);
pvDesktop.loadGexCache();
Logger.log.info ("Loaded pgex " + str);
} catch (IDMapperException e) {
Logger.log.error ("Couldn't open pgex " + str, e);
}
}
}
private String shortenString(String s) {
return shortenString(s, 20);
}
private String shortenString(String s, int maxLength) {
    if(s.length() > maxLength) {
        String prefix = "...";
        // keep the tail so that prefix + tail is exactly maxLength characters long
        s = s.substring(s.length() - (maxLength - prefix.length()));
        s = prefix + s;
    }
    return s;
}
private void setGdbStatus(JLabel gdbLabel, JLabel mdbLabel) {
String gdb = "" + swingEngine.getGdbManager().getGeneDb();
String mdb = "" + swingEngine.getGdbManager().getMetaboliteDb();
gdbLabel.setText(gdb != null ? (" | Gene database: " + shortenString(gdb)) : "");
mdbLabel.setText(mdb != null ? (" | Metabolite database: " + shortenString(mdb)) : "");
gdbLabel.setToolTipText(gdb != null ? gdb : "");
mdbLabel.setToolTipText(mdb != null ? mdb : "");
}
public void gdbEvent(GdbEvent e) {
setGdbStatus(gdbLabel, mdbLabel);
}
public void gexManagerEvent(GexManagerEvent e)
{
if(e.getType() == GexManagerEvent.CONNECTION_OPENED ||
e.getType() == GexManagerEvent.CONNECTION_CLOSED)
{
SimpleGex gex = pvDesktop.getGexManager().getCurrentGex();
if(gex != null && gex.isConnected()) {
gexLabel.setText(" | Dataset: " + shortenString(gex.getDbName()));
gexLabel.setToolTipText(gex.getDbName());
} else {
gexLabel.setText("");
gexLabel.setToolTipText("");
}
}
}
private JLabel gdbLabel;
private JLabel mdbLabel;
private JLabel gexLabel;
/**
* Creates and shows the GUI. Creates and shows the Frame, sets the size, title and menubar.
* @param mainPanel The main panel to show in the frame
*/
protected JFrame createAndShowGUI(final MainPanelStandalone mainPanel, final SwingEngine swingEngine)
{
//Create and set up the window.
final JFrame frame = new JFrame(Globals.APPLICATION_NAME);
// dispose on close, otherwise windowClosed event is not called.
frame.setDefaultCloseOperation(JFrame.DO_NOTHING_ON_CLOSE);
frame.add(mainPanel, BorderLayout.CENTER);
JPanel statusBar = new JPanel();
statusBar.setLayout(new BoxLayout(statusBar, BoxLayout.X_AXIS));
frame.add(statusBar, BorderLayout.SOUTH);
gdbLabel = new JLabel();
mdbLabel = new JLabel();
gexLabel = new JLabel();
statusBar.add(gdbLabel);
statusBar.add(mdbLabel);
statusBar.add(gexLabel);
setGdbStatus(gdbLabel, mdbLabel);
swingEngine.getGdbManager().addGdbEventListener(this);
pvDesktop.getGexManager().addListener(this);
frame.setJMenuBar(mainPanel.getMenuBar());
frame.pack();
PreferenceManager preferences = PreferenceManager.getCurrent();
frame.setSize(preferences.getInt(GlobalPreference.WIN_W), preferences.getInt(GlobalPreference.WIN_H));
int x = preferences.getInt(GlobalPreference.WIN_X);
int y = preferences.getInt(GlobalPreference.WIN_Y);
- if(x > 0 && y > 0) frame.setLocation(x, y);
+ if(x >= 0 && y >= 0) frame.setLocation(x, y);
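// (fix) a saved coordinate of 0 is valid; with '>' a window placed flush against the screen's top or left edge was never restored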
frame.addWindowListener(new WindowAdapter()
{
@Override
public void windowClosing(WindowEvent we)
{
PreferenceManager prefs = PreferenceManager.getCurrent();
JFrame frame = swingEngine.getFrame();
Dimension size = frame.getSize();
Point p = frame.getLocationOnScreen();
prefs.setInt(GlobalPreference.WIN_W, size.width);
prefs.setInt(GlobalPreference.WIN_H, size.height);
prefs.setInt(GlobalPreference.WIN_X, p.x);
prefs.setInt(GlobalPreference.WIN_Y, p.y);
if(swingEngine.canDiscardPathway()) {
frame.dispose();
}
}
@Override
public void windowClosed(WindowEvent we)
{
GuiMain.this.shutdown(swingEngine);
// stops all bundles, exceptions are no real problem
// because System.exit(0) should stop everything anyway
for(Bundle bundle : pvDesktop.getContext().getBundles()) {
if(bundle.getState() == Bundle.ACTIVE) {
try {
bundle.stop();
} catch (BundleException e) {}
}
}
// added system exit, so the application closes after the window is closed
System.exit(0);
}
});
//Display the window.
frame.setVisible(true);
int spPercent = PreferenceManager.getCurrent().getInt (GlobalPreference.GUI_SIDEPANEL_SIZE);
double spSize = (100 - spPercent) / 100.0;
mainPanel.getSplitPane().setDividerLocation(spSize);
return frame;
}
private void shutdown(SwingEngine swingEngine)
{
PreferenceManager prefs = PreferenceManager.getCurrent();
prefs.store();
//explicit clean shutdown of gdb prevents file from being left open
if (swingEngine.getGdbManager().isConnected())
{
try
{
swingEngine.getGdbManager().getCurrentGdb().close();
}
catch (IDMapperException ex)
{
Logger.log.error ("Couldn't cleanly close pgdb database", ex);
}
}
swingEngine.getGdbManager().removeGdbEventListener(this);
mainPanel.dispose();
pvDesktop.getGexManager().removeListener(this);
pvDesktop.dispose();
swingEngine.getEngine().dispose();
swingEngine.dispose();
Logger.log.info ("PathVisio was shut down cleanly");
// stop the timer and clean out the files on a successful shutdown
auto.stopTimer();
}
public MainPanel getMainPanel() { return mainPanel; }
public void init(PvDesktop pvDesktop) {
this.pvDesktop = pvDesktop;
Engine engine = pvDesktop.getSwingEngine().getEngine();
initLog(engine);
engine.setApplicationName("PathVisio " + Engine.getVersion());
if (PreferenceManager.getCurrent().getBoolean(GlobalPreference.USE_SYSTEM_LOOK_AND_FEEL))
{
try {
UIManager.setLookAndFeel(
UIManager.getSystemLookAndFeelClassName());
} catch (Exception ex) {
Logger.log.error("Unable to load native look and feel", ex);
}
}
swingEngine = pvDesktop.getSwingEngine();
swingEngine.setUrlBrowser(new Browser() {
public void openUrl(URL url) {
try {
BrowserLauncher b = new BrowserLauncher(null);
b.openURLinBrowser(url.toString());
} catch (Exception ex) {
Logger.log.error ("Couldn't open url '" + url + "'", ex);
}
}
});
swingEngine.getGdbManager().initPreferred();
mainPanel = new MainPanelStandalone(pvDesktop);
mainPanel.createAndShowGUI();
JFrame frame = createAndShowGUI(mainPanel, swingEngine);
initImporters(engine);
initExporters(engine, swingEngine.getGdbManager());
MIMShapes.registerShapes();
swingEngine.setFrame(frame);
swingEngine.setApplicationPanel(mainPanel);
// start the autosave timer
auto = new AutoSave(swingEngine);
auto.startTimer(300);
processOptions();
}
private void initImporters(Engine engine)
{
engine.addPathwayImporter(new MappFormat());
engine.addPathwayImporter(new GpmlFormat());
}
private void initExporters(Engine engine, GdbManager gdbManager)
{
engine.addPathwayExporter(new MappFormat());
engine.addPathwayExporter(new GpmlFormat());
GexManager gex = pvDesktop.getGexManager();
VisualizationManager vis = pvDesktop.getVisualizationManager();
engine.addPathwayExporter(new RasterImageWithDataExporter(ImageExporter.TYPE_PNG, gex, vis));
engine.addPathwayExporter(new BatikImageWithDataExporter(ImageExporter.TYPE_SVG, gex, vis));
engine.addPathwayExporter(new BatikImageWithDataExporter(ImageExporter.TYPE_TIFF, gex, vis));
engine.addPathwayExporter(new BatikImageWithDataExporter(ImageExporter.TYPE_PDF, gex, vis));
engine.addPathwayExporter(new DataNodeListExporter(gdbManager));
engine.addPathwayExporter(new EUGeneExporter());
}
}
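// Illustrative sketch (not part of the PathVisio sources) of the extension point the
// class javadoc mentions: a subclass overriding createAndShowGUI to run custom setup
// before the frame is shown. The class name and log message are assumptions.
class ThemedGuiMain extends GuiMain {
	@Override
	protected JFrame createAndShowGUI(MainPanelStandalone mainPanel, SwingEngine swingEngine) {
		Logger.log.info("Applying custom look-and-feel tweaks before showing the frame");
		// ...custom pre-show actions go here...
		return super.createAndShowGUI(mainPanel, swingEngine);
	}
}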
| true | true | protected JFrame createAndShowGUI(final MainPanelStandalone mainPanel, final SwingEngine swingEngine)
{
//Create and set up the window.
final JFrame frame = new JFrame(Globals.APPLICATION_NAME);
// dispose on close, otherwise windowClosed event is not called.
frame.setDefaultCloseOperation(JFrame.DO_NOTHING_ON_CLOSE);
frame.add(mainPanel, BorderLayout.CENTER);
JPanel statusBar = new JPanel();
statusBar.setLayout(new BoxLayout(statusBar, BoxLayout.X_AXIS));
frame.add(statusBar, BorderLayout.SOUTH);
gdbLabel = new JLabel();
mdbLabel = new JLabel();
gexLabel = new JLabel();
statusBar.add(gdbLabel);
statusBar.add(mdbLabel);
statusBar.add(gexLabel);
setGdbStatus(gdbLabel, mdbLabel);
swingEngine.getGdbManager().addGdbEventListener(this);
pvDesktop.getGexManager().addListener(this);
frame.setJMenuBar(mainPanel.getMenuBar());
frame.pack();
PreferenceManager preferences = PreferenceManager.getCurrent();
frame.setSize(preferences.getInt(GlobalPreference.WIN_W), preferences.getInt(GlobalPreference.WIN_H));
int x = preferences.getInt(GlobalPreference.WIN_X);
int y = preferences.getInt(GlobalPreference.WIN_Y);
if(x > 0 && y > 0) frame.setLocation(x, y);
frame.addWindowListener(new WindowAdapter()
{
@Override
public void windowClosing(WindowEvent we)
{
PreferenceManager prefs = PreferenceManager.getCurrent();
JFrame frame = swingEngine.getFrame();
Dimension size = frame.getSize();
Point p = frame.getLocationOnScreen();
prefs.setInt(GlobalPreference.WIN_W, size.width);
prefs.setInt(GlobalPreference.WIN_H, size.height);
prefs.setInt(GlobalPreference.WIN_X, p.x);
prefs.setInt(GlobalPreference.WIN_Y, p.y);
if(swingEngine.canDiscardPathway()) {
frame.dispose();
}
}
@Override
public void windowClosed(WindowEvent we)
{
GuiMain.this.shutdown(swingEngine);
// stops all bundles, exceptions are no real problem
// because System.exit(0) should stop everything anyway
for(Bundle bundle : pvDesktop.getContext().getBundles()) {
if(bundle.getState() == Bundle.ACTIVE) {
try {
bundle.stop();
} catch (BundleException e) {}
}
}
// added system exit, so the application closes after the window is closed
System.exit(0);
}
});
//Display the window.
frame.setVisible(true);
int spPercent = PreferenceManager.getCurrent().getInt (GlobalPreference.GUI_SIDEPANEL_SIZE);
double spSize = (100 - spPercent) / 100.0;
mainPanel.getSplitPane().setDividerLocation(spSize);
return frame;
}
| protected JFrame createAndShowGUI(final MainPanelStandalone mainPanel, final SwingEngine swingEngine)
{
//Create and set up the window.
final JFrame frame = new JFrame(Globals.APPLICATION_NAME);
// dispose on close, otherwise windowClosed event is not called.
frame.setDefaultCloseOperation(JFrame.DO_NOTHING_ON_CLOSE);
frame.add(mainPanel, BorderLayout.CENTER);
JPanel statusBar = new JPanel();
statusBar.setLayout(new BoxLayout(statusBar, BoxLayout.X_AXIS));
frame.add(statusBar, BorderLayout.SOUTH);
gdbLabel = new JLabel();
mdbLabel = new JLabel();
gexLabel = new JLabel();
statusBar.add(gdbLabel);
statusBar.add(mdbLabel);
statusBar.add(gexLabel);
setGdbStatus(gdbLabel, mdbLabel);
swingEngine.getGdbManager().addGdbEventListener(this);
pvDesktop.getGexManager().addListener(this);
frame.setJMenuBar(mainPanel.getMenuBar());
frame.pack();
PreferenceManager preferences = PreferenceManager.getCurrent();
frame.setSize(preferences.getInt(GlobalPreference.WIN_W), preferences.getInt(GlobalPreference.WIN_H));
int x = preferences.getInt(GlobalPreference.WIN_X);
int y = preferences.getInt(GlobalPreference.WIN_Y);
if(x >= 0 && y >= 0) frame.setLocation(x, y);
frame.addWindowListener(new WindowAdapter()
{
@Override
public void windowClosing(WindowEvent we)
{
PreferenceManager prefs = PreferenceManager.getCurrent();
JFrame frame = swingEngine.getFrame();
Dimension size = frame.getSize();
Point p = frame.getLocationOnScreen();
prefs.setInt(GlobalPreference.WIN_W, size.width);
prefs.setInt(GlobalPreference.WIN_H, size.height);
prefs.setInt(GlobalPreference.WIN_X, p.x);
prefs.setInt(GlobalPreference.WIN_Y, p.y);
if(swingEngine.canDiscardPathway()) {
frame.dispose();
}
}
@Override
public void windowClosed(WindowEvent we)
{
GuiMain.this.shutdown(swingEngine);
// stops all bundles, exceptions are no real problem
// because System.exit(0) should stop everything anyway
for(Bundle bundle : pvDesktop.getContext().getBundles()) {
if(bundle.getState() == Bundle.ACTIVE) {
try {
bundle.stop();
} catch (BundleException e) {}
}
}
// added system exit, so the application closes after the window is closed
System.exit(0);
}
});
//Display the window.
frame.setVisible(true);
int spPercent = PreferenceManager.getCurrent().getInt (GlobalPreference.GUI_SIDEPANEL_SIZE);
double spSize = (100 - spPercent) / 100.0;
mainPanel.getSplitPane().setDividerLocation(spSize);
return frame;
}
|
diff --git a/src/streamfish/MainMenu.java b/src/streamfish/MainMenu.java
index 60e50c5..10dd381 100644
--- a/src/streamfish/MainMenu.java
+++ b/src/streamfish/MainMenu.java
@@ -1,597 +1,600 @@
/*
* To change this template, choose Tools | Templates
* and open the template in the editor.
*/
package streamfish;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import static javax.swing.JOptionPane.*;
import javax.swing.event.DocumentEvent;
import javax.swing.event.DocumentListener;
import javax.swing.event.ListSelectionEvent;
import javax.swing.event.ListSelectionListener;
import javax.swing.table.DefaultTableModel;
/**
*
* @author Kristian
*/
public class MainMenu extends javax.swing.JPanel {
private int kundenr = -1; // kundenr: Norwegian for "customer number"; -1 means no customer selected
private final GUI gui;
private Customer[] customers;
private int viewRow = -1;
private Orderinfo[] orderinfo;
private Subscription[] subscriptions;
/**
* Creates new form MainMenu
*/
public MainMenu(final GUI gui) {
this.gui = gui;
gui.setTitle("Main Menu");
initComponents();
customers = gui.getCustomers(jTextField1.getText(), jCheckBox1.isSelected());
// jTable1.setModel();
// from here tab2 NorC
tab1setup();
tab2setup();
tab3setup();
//to here tab2 Norc
jTextField1.getDocument().addDocumentListener(new DocumentListener() {
@Override
public void insertUpdate(DocumentEvent e) {
if (jTabbedPane1.getSelectedIndex() == 0) {
customers = gui.getCustomers(jTextField1.getText(), jCheckBox1.isSelected());
DefaultTableModel model = (DefaultTableModel) jTable1.getModel();
model.setRowCount(0);
if (customers != null && customers.length > 0) {
for (int i = 0; i < customers.length; i++) {
model.addRow(new Object[]{customers[i].getCustomerID(), customers[i].getCustomerName(), customers[i].getPhoneNumber(), customers[i].isBusiness()});
}
}
} else if (jTabbedPane1.getSelectedIndex() == 1) {
orderinfo = gui.getTodaysTasks(jTextField1.getText());
DefaultTableModel model = (DefaultTableModel) jTable2.getModel();
model.setRowCount(0);
if (orderinfo != null && orderinfo.length > 0) {
for (int i = 0; i < orderinfo.length; i++) {
model.addRow(new Object[]{orderinfo[i].getAddress(), orderinfo[i].getCustomerName(), orderinfo[i].getPhone()});
}
}
}
}
@Override
public void removeUpdate(DocumentEvent e) {
if (jTabbedPane1.getSelectedIndex() == 0) {
customers = gui.getCustomers(jTextField1.getText(), jCheckBox1.isSelected());
DefaultTableModel model = (DefaultTableModel) jTable1.getModel();
model.setRowCount(0);
if (customers != null && customers.length > 0) {
for (int i = 0; i < customers.length; i++) {
model.addRow(new Object[]{customers[i].getCustomerID(), customers[i].getCustomerName(), customers[i].getPhoneNumber(), customers[i].isBusiness()});
}
}
} else if (jTabbedPane1.getSelectedIndex() == 1) {
orderinfo = gui.getTodaysTasks(jTextField1.getText());
DefaultTableModel model = (DefaultTableModel) jTable2.getModel();
model.setRowCount(0);
if (orderinfo != null && orderinfo.length > 0) {
for (int i = 0; i < orderinfo.length; i++) {
model.addRow(new Object[]{orderinfo[i].getAddress(), orderinfo[i].getCustomerName(), orderinfo[i].getPhone()});
}
}
}
}
@Override
public void changedUpdate(DocumentEvent e) {
if (jTabbedPane1.getSelectedIndex() == 0) {
customers = gui.getCustomers(jTextField1.getText(), jCheckBox1.isSelected());
DefaultTableModel model = (DefaultTableModel) jTable1.getModel();
model.setRowCount(0);
if (customers != null && customers.length > 0) {
for (int i = 0; i < customers.length; i++) {
model.addRow(new Object[]{customers[i].getCustomerID(), customers[i].getCustomerName(), customers[i].getPhoneNumber(), customers[i].isBusiness()});
}
}
} else if (jTabbedPane1.getSelectedIndex() == 1) {
orderinfo = gui.getTodaysTasks(jTextField1.getText());
DefaultTableModel model = (DefaultTableModel) jTable2.getModel();
model.setRowCount(0);
if (orderinfo != null && orderinfo.length > 0) {
for (int i = 0; i < orderinfo.length; i++) {
model.addRow(new Object[]{orderinfo[i].getAddress(), orderinfo[i].getCustomerName(), orderinfo[i].getPhone()});
}
}
}
}
});
jCheckBox1.addActionListener(new ActionListener() {
@Override
public void actionPerformed(ActionEvent e) {
if (jTabbedPane1.getSelectedIndex() == 0) {
customers = gui.getCustomers(jTextField1.getText(), jCheckBox1.isSelected());
DefaultTableModel model = (DefaultTableModel) jTable1.getModel();
model.setRowCount(0);
if (customers != null && customers.length > 0) {
for (int i = 0; i < customers.length; i++) {
model.addRow(new Object[]{customers[i].getCustomerID(), customers[i].getCustomerName(), customers[i].getPhoneNumber(), customers[i].isBusiness()});
}
}
} else if (jTabbedPane1.getSelectedIndex() == 1) {
orderinfo = gui.getTodaysTasks(jTextField1.getText());
DefaultTableModel model = (DefaultTableModel) jTable2.getModel(); // orderinfo rows belong to jTable2, matching the DocumentListener branches above
model.setRowCount(0);
if (orderinfo != null && orderinfo.length > 0) {
for (int i = 0; i < orderinfo.length; i++) {
model.addRow(new Object[]{orderinfo[i].getAddress(), orderinfo[i].getCustomerName(), orderinfo[i].getPhone()});
}
}
}
}
});
}
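	// Illustrative consolidation sketch (not in the original source): the three
	// DocumentListener callbacks and the checkbox listener above repeat one refresh
	// routine. The helper name refreshActiveTab is an assumption; everything it
	// calls already exists in this class.
	private void refreshActiveTab() {
		if (jTabbedPane1.getSelectedIndex() == 0) {
			customers = gui.getCustomers(jTextField1.getText(), jCheckBox1.isSelected());
			DefaultTableModel model = (DefaultTableModel) jTable1.getModel();
			model.setRowCount(0);
			if (customers != null && customers.length > 0) {
				for (Customer c : customers) {
					model.addRow(new Object[]{c.getCustomerID(), c.getCustomerName(), c.getPhoneNumber(), c.isBusiness()});
				}
			}
		} else if (jTabbedPane1.getSelectedIndex() == 1) {
			orderinfo = gui.getTodaysTasks(jTextField1.getText());
			DefaultTableModel model = (DefaultTableModel) jTable2.getModel();
			model.setRowCount(0);
			if (orderinfo != null && orderinfo.length > 0) {
				for (Orderinfo o : orderinfo) {
					model.addRow(new Object[]{o.getAddress(), o.getCustomerName(), o.getPhone()});
				}
			}
		}
	}
	// Each listener body could then shrink to a single refreshActiveTab() call.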
private void tab1setup() {
if (customers != null && customers.length > 0) {
for (int i = 0; i < customers.length; i++) {
DefaultTableModel model = (DefaultTableModel) jTable1.getModel();
model.addRow(new Object[]{customers[i].getCustomerID(), customers[i].getCustomerName(), customers[i].getPhoneNumber(), customers[i].isBusiness()});
}
}
jTable1.getSelectionModel().addListSelectionListener(
new ListSelectionListener() {
@Override
public void valueChanged(ListSelectionEvent event) {
int viewRow = jTable1.getSelectedRow();
if (!event.getValueIsAdjusting()) {
try {
kundenr = Integer.parseInt(jTable1.getValueAt(viewRow, 0).toString());
} catch (Exception e) {
}
}
}
});
}
private void tab2setup() {
orderinfo = gui.getTodaysTasks("");
if (orderinfo != null && orderinfo.length > 0) {
for (int i = 0; i < orderinfo.length; i++) {
DefaultTableModel model = (DefaultTableModel) jTable2.getModel();
model.addRow(new Object[]{orderinfo[i].getAddress(), orderinfo[i].getCustomerName(), orderinfo[i].getPhone()});
}
}
jTable2.getSelectionModel().addListSelectionListener(
new ListSelectionListener() {
@Override
public void valueChanged(ListSelectionEvent event) {
if (!event.getValueIsAdjusting()) {
viewRow = jTable2.getSelectedRow();
}
}
});
}
private void tab3setup() {
subscriptions = gui.getSubscriptions(jTextField1.getText());
if (subscriptions != null && subscriptions.length > 0) {
for (int i = 0; i < subscriptions.length; i++) {
DefaultTableModel model = (DefaultTableModel) jTable3.getModel();
model.addRow(new Object[]{
subscriptions[i].getCustomerName(gui.getOrderfromSub(subscriptions[i]), gui),
subscriptions[i].getDuration(),
subscriptions[i].getDayofWeek(),
subscriptions[i].getMenuName(gui.getOrderfromSub(subscriptions[i]), gui)
});
}
}
jTable3.getSelectionModel().addListSelectionListener(
new ListSelectionListener() {
@Override
public void valueChanged(ListSelectionEvent event) {
if (!event.getValueIsAdjusting()) {
viewRow = jTable3.getSelectedRow();
}
}
});
}
public void updt() {
customers = gui.getCustomers(jTextField1.getText(), jCheckBox1.isSelected());
DefaultTableModel model = (DefaultTableModel) jTable1.getModel();
model.setRowCount(0);
if (customers != null && customers.length > 0) {
for (int i = 0; i < customers.length; i++) {
model.addRow(new Object[]{customers[i].getCustomerID(), customers[i].getCustomerName(), customers[i].getPhoneNumber(), customers[i].isBusiness()});
}
}
}
public void updtTodaysTasks() {
orderinfo = gui.getTodaysTasks("");
DefaultTableModel model = (DefaultTableModel) jTable2.getModel();
model.setRowCount(0);
if (orderinfo != null && orderinfo.length > 0) {
for (int i = 0; i < orderinfo.length; i++) {
model.addRow(new Object[]{orderinfo[i].getAddress(), orderinfo[i].getCustomerName(), orderinfo[i].getPhone()});
}
}
}
/**
* This method is called from within the constructor to initialize the form.
* WARNING: Do NOT modify this code. The content of this method is always
* regenerated by the Form Editor.
*/
@SuppressWarnings("unchecked")
// <editor-fold defaultstate="collapsed" desc="Generated Code">//GEN-BEGIN:initComponents
private void initComponents() {
jTextField1 = new javax.swing.JTextField();
jLabel1 = new javax.swing.JLabel();
jCheckBox1 = new javax.swing.JCheckBox();
jButton5 = new javax.swing.JButton();
jTabbedPane1 = new javax.swing.JTabbedPane();
jPanel1 = new javax.swing.JPanel();
jScrollPane2 = new javax.swing.JScrollPane();
jTable1 = new javax.swing.JTable();
jButton1 = new javax.swing.JButton();
jButton2 = new javax.swing.JButton();
jButton3 = new javax.swing.JButton();
jPanel2 = new javax.swing.JPanel();
jScrollPane1 = new javax.swing.JScrollPane();
jTable2 = new javax.swing.JTable();
jButton4 = new javax.swing.JButton();
jPanel3 = new javax.swing.JPanel();
jScrollPane3 = new javax.swing.JScrollPane();
jTable3 = new javax.swing.JTable();
jButton7 = new javax.swing.JButton();
jButton6 = new javax.swing.JButton();
jLabel2 = new javax.swing.JLabel();
jButton8 = new javax.swing.JButton();
jButton9 = new javax.swing.JButton();
jLabel1.setText("Search:");
jCheckBox1.setText("Show only inactive");
jButton5.setText("Storage");
jButton5.setToolTipText("");
jButton5.addActionListener(new java.awt.event.ActionListener() {
public void actionPerformed(java.awt.event.ActionEvent evt) {
jButton5ActionPerformed(evt);
}
});
jTable1.setModel(new javax.swing.table.DefaultTableModel(
new Object [][] {
},
new String [] {
"Customer_id", "Customer name", "Phone", "Business"
}
) {
boolean[] canEdit = new boolean [] {
false, false, false, false
};
public boolean isCellEditable(int rowIndex, int columnIndex) {
return canEdit [columnIndex];
}
});
jTable1.setMaximumSize(new java.awt.Dimension(300, 64));
jTable1.setSelectionMode(javax.swing.ListSelectionModel.SINGLE_SELECTION);
+ jTable1.getTableHeader().setReorderingAllowed(false);
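// (fix) locking the column order keeps positional lookups such as getValueAt(row, 0) pointing at the customer id column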
jScrollPane2.setViewportView(jTable1);
jTable1.getColumnModel().getColumn(0).setResizable(false);
jTable1.getColumnModel().getColumn(1).setResizable(false);
jTable1.getColumnModel().getColumn(2).setResizable(false);
jTable1.getColumnModel().getColumn(3).setResizable(false);
jButton1.setText("Register customer");
jButton1.addActionListener(new java.awt.event.ActionListener() {
public void actionPerformed(java.awt.event.ActionEvent evt) {
jButton1ActionPerformed(evt);
}
});
jButton2.setText("Register order");
jButton2.addActionListener(new java.awt.event.ActionListener() {
public void actionPerformed(java.awt.event.ActionEvent evt) {
jButton2ActionPerformed(evt);
}
});
jButton3.setText("Edit customer");
jButton3.addActionListener(new java.awt.event.ActionListener() {
public void actionPerformed(java.awt.event.ActionEvent evt) {
jButton3ActionPerformed(evt);
}
});
javax.swing.GroupLayout jPanel1Layout = new javax.swing.GroupLayout(jPanel1);
jPanel1.setLayout(jPanel1Layout);
jPanel1Layout.setHorizontalGroup(
jPanel1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(jPanel1Layout.createSequentialGroup()
.addGroup(jPanel1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(jPanel1Layout.createSequentialGroup()
.addContainerGap()
.addComponent(jButton1)
.addGap(60, 60, 60)
.addComponent(jButton3)
.addGap(58, 58, 58)
- .addComponent(jButton2, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
+ .addComponent(jButton2, javax.swing.GroupLayout.DEFAULT_SIZE, 197, Short.MAX_VALUE)
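// (fix) gives jButton2 an explicit preferred width of 197 px instead of DEFAULT_SIZE, presumably to keep the button row stable on resize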
.addGap(18, 18, 18))
.addComponent(jScrollPane2))
.addGap(0, 0, 0))
);
jPanel1Layout.setVerticalGroup(
jPanel1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(jPanel1Layout.createSequentialGroup()
.addContainerGap()
.addComponent(jScrollPane2, javax.swing.GroupLayout.PREFERRED_SIZE, 186, javax.swing.GroupLayout.PREFERRED_SIZE)
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
.addGroup(jPanel1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
.addComponent(jButton1)
.addComponent(jButton2)
.addComponent(jButton3))
.addGap(14, 14, 14))
);
jTabbedPane1.addTab("Customers", jPanel1);
jTable2.setModel(new javax.swing.table.DefaultTableModel(
new Object [][] {
},
new String [] {
"Address", "Name", "Phone number"
}
) {
boolean[] canEdit = new boolean [] {
false, false, false
};
public boolean isCellEditable(int rowIndex, int columnIndex) {
return canEdit [columnIndex];
}
});
+ jTable2.getTableHeader().setReorderingAllowed(false);
jScrollPane1.setViewportView(jTable2);
jButton4.setText("Get info");
jButton4.addActionListener(new java.awt.event.ActionListener() {
public void actionPerformed(java.awt.event.ActionEvent evt) {
jButton4ActionPerformed(evt);
}
});
javax.swing.GroupLayout jPanel2Layout = new javax.swing.GroupLayout(jPanel2);
jPanel2.setLayout(jPanel2Layout);
jPanel2Layout.setHorizontalGroup(
jPanel2Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addComponent(jScrollPane1, javax.swing.GroupLayout.DEFAULT_SIZE, 563, Short.MAX_VALUE)
.addGroup(jPanel2Layout.createSequentialGroup()
.addGap(150, 150, 150)
.addComponent(jButton4)
.addGap(150, 150, 150))
);
jPanel2Layout.setVerticalGroup(
jPanel2Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(jPanel2Layout.createSequentialGroup()
.addComponent(jScrollPane1, javax.swing.GroupLayout.DEFAULT_SIZE, 217, Short.MAX_VALUE)
.addGap(0, 0, 0)
.addComponent(jButton4))
);
jTabbedPane1.addTab("Todays tasks", jPanel2);
jTable3.setModel(new javax.swing.table.DefaultTableModel(
new Object [][] {
},
new String [] {
"Customer", "Duration", "Day", "Menu name"
}
));
+ jTable3.getTableHeader().setReorderingAllowed(false);
jScrollPane3.setViewportView(jTable3);
jButton7.setText("Delete subscription");
jButton7.addActionListener(new java.awt.event.ActionListener() {
public void actionPerformed(java.awt.event.ActionEvent evt) {
jButton7ActionPerformed(evt);
}
});
javax.swing.GroupLayout jPanel3Layout = new javax.swing.GroupLayout(jPanel3);
jPanel3.setLayout(jPanel3Layout);
jPanel3Layout.setHorizontalGroup(
jPanel3Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addComponent(jScrollPane3, javax.swing.GroupLayout.DEFAULT_SIZE, 563, Short.MAX_VALUE)
.addGroup(javax.swing.GroupLayout.Alignment.TRAILING, jPanel3Layout.createSequentialGroup()
.addContainerGap(175, Short.MAX_VALUE)
.addComponent(jButton7)
.addGap(175, 175, 175))
);
jPanel3Layout.setVerticalGroup(
jPanel3Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(jPanel3Layout.createSequentialGroup()
.addComponent(jScrollPane3, javax.swing.GroupLayout.DEFAULT_SIZE, 217, Short.MAX_VALUE)
.addGap(0, 0, 0)
.addComponent(jButton7))
);
jTabbedPane1.addTab("Subscriptions", jPanel3);
jButton6.setText("Exit");
jButton6.addActionListener(new java.awt.event.ActionListener() {
public void actionPerformed(java.awt.event.ActionEvent evt) {
jButton6ActionPerformed(evt);
}
});
jLabel2.setText(TodaysDate.getDate());
jButton8.setText("Statistics");
jButton8.addActionListener(new java.awt.event.ActionListener() {
public void actionPerformed(java.awt.event.ActionEvent evt) {
jButton8ActionPerformed(evt);
}
});
jButton9.setText("Change password");
jButton9.addActionListener(new java.awt.event.ActionListener() {
public void actionPerformed(java.awt.event.ActionEvent evt) {
jButton9ActionPerformed(evt);
}
});
javax.swing.GroupLayout layout = new javax.swing.GroupLayout(this);
this.setLayout(layout);
layout.setHorizontalGroup(
layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(layout.createSequentialGroup()
.addContainerGap()
.addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(layout.createSequentialGroup()
.addComponent(jCheckBox1)
.addGap(18, 18, 18)
.addComponent(jButton5)
.addGap(18, 18, 18)
.addComponent(jButton8)
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
.addComponent(jLabel1)
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.UNRELATED)
.addComponent(jTextField1, javax.swing.GroupLayout.PREFERRED_SIZE, 120, javax.swing.GroupLayout.PREFERRED_SIZE))
.addGroup(layout.createSequentialGroup()
.addComponent(jTabbedPane1, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
.addGap(0, 0, Short.MAX_VALUE))
.addGroup(layout.createSequentialGroup()
.addComponent(jButton6)
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
.addComponent(jButton9)
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
.addComponent(jLabel2, javax.swing.GroupLayout.PREFERRED_SIZE, 120, javax.swing.GroupLayout.PREFERRED_SIZE)))
.addContainerGap())
);
layout.setVerticalGroup(
layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(layout.createSequentialGroup()
.addContainerGap()
.addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
.addComponent(jTextField1, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
.addComponent(jLabel1)
.addComponent(jCheckBox1)
.addComponent(jButton5)
.addComponent(jButton8))
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.UNRELATED)
.addComponent(jTabbedPane1)
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.UNRELATED)
.addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
.addComponent(jButton6)
.addComponent(jLabel2)
.addComponent(jButton9))
.addContainerGap())
);
}// </editor-fold>//GEN-END:initComponents
private void jButton2ActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_jButton2ActionPerformed
if (kundenr == -1) {
showMessageDialog(null, "Ingen kunde er valgt.");
} else {
gui.byttVindu(this, new Reg_ordre(kundenr, gui));
}
}//GEN-LAST:event_jButton2ActionPerformed
private void jButton1ActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_jButton1ActionPerformed
// TODO add your handling code here:
gui.byttVindu(this, new Reg_kunde(gui));
}//GEN-LAST:event_jButton1ActionPerformed
private void jButton4ActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_jButton4ActionPerformed
// 0,1,2 if (jTabbedPane1.getSelectedIndex() == 0) {
if (viewRow >= 0) {
new TodaysTasksFrame(orderinfo[viewRow], gui);
} else {
showMessageDialog(null, "Ingen valgt");
}
}//GEN-LAST:event_jButton4ActionPerformed
private void jButton5ActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_jButton5ActionPerformed
// TODO add your handling code here:
gui.byttVindu(this, new Storage(gui));
}//GEN-LAST:event_jButton5ActionPerformed
private void jButton3ActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_jButton3ActionPerformed
if (kundenr == -1) {
showMessageDialog(null, "Ingen kunde er valgt.");
} else {
gui.byttVindu(this, new Edit_customer(kundenr, gui));
}
}//GEN-LAST:event_jButton3ActionPerformed
private void jButton6ActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_jButton6ActionPerformed
System.exit(0);
}//GEN-LAST:event_jButton6ActionPerformed
private void jButton7ActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_jButton7ActionPerformed
// TODO add your handling code here:
jTable3.getModel().getValueAt(jTable3.getSelectedRow(), 0); // value fetched but unused: subscription deletion is still a stub
}//GEN-LAST:event_jButton7ActionPerformed
private void jButton8ActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_jButton8ActionPerformed
gui.byttVindu(this, new Statistics(gui));
}//GEN-LAST:event_jButton8ActionPerformed
private void jButton9ActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_jButton9ActionPerformed
new Change_password(gui);
}//GEN-LAST:event_jButton9ActionPerformed
// Variables declaration - do not modify//GEN-BEGIN:variables
private javax.swing.JButton jButton1;
private javax.swing.JButton jButton2;
private javax.swing.JButton jButton3;
private javax.swing.JButton jButton4;
private javax.swing.JButton jButton5;
private javax.swing.JButton jButton6;
private javax.swing.JButton jButton7;
private javax.swing.JButton jButton8;
private javax.swing.JButton jButton9;
private javax.swing.JCheckBox jCheckBox1;
private javax.swing.JLabel jLabel1;
private javax.swing.JLabel jLabel2;
private javax.swing.JPanel jPanel1;
private javax.swing.JPanel jPanel2;
private javax.swing.JPanel jPanel3;
private javax.swing.JScrollPane jScrollPane1;
private javax.swing.JScrollPane jScrollPane2;
private javax.swing.JScrollPane jScrollPane3;
private javax.swing.JTabbedPane jTabbedPane1;
private javax.swing.JTable jTable1;
private javax.swing.JTable jTable2;
private javax.swing.JTable jTable3;
private javax.swing.JTextField jTextField1;
// End of variables declaration//GEN-END:variables
}
| false | true | private void initComponents() {
jTextField1 = new javax.swing.JTextField();
jLabel1 = new javax.swing.JLabel();
jCheckBox1 = new javax.swing.JCheckBox();
jButton5 = new javax.swing.JButton();
jTabbedPane1 = new javax.swing.JTabbedPane();
jPanel1 = new javax.swing.JPanel();
jScrollPane2 = new javax.swing.JScrollPane();
jTable1 = new javax.swing.JTable();
jButton1 = new javax.swing.JButton();
jButton2 = new javax.swing.JButton();
jButton3 = new javax.swing.JButton();
jPanel2 = new javax.swing.JPanel();
jScrollPane1 = new javax.swing.JScrollPane();
jTable2 = new javax.swing.JTable();
jButton4 = new javax.swing.JButton();
jPanel3 = new javax.swing.JPanel();
jScrollPane3 = new javax.swing.JScrollPane();
jTable3 = new javax.swing.JTable();
jButton7 = new javax.swing.JButton();
jButton6 = new javax.swing.JButton();
jLabel2 = new javax.swing.JLabel();
jButton8 = new javax.swing.JButton();
jButton9 = new javax.swing.JButton();
jLabel1.setText("Search:");
jCheckBox1.setText("Show only inactive");
jButton5.setText("Storage");
jButton5.setToolTipText("");
jButton5.addActionListener(new java.awt.event.ActionListener() {
public void actionPerformed(java.awt.event.ActionEvent evt) {
jButton5ActionPerformed(evt);
}
});
jTable1.setModel(new javax.swing.table.DefaultTableModel(
new Object [][] {
},
new String [] {
"Customer_id", "Customer name", "Phone", "Business"
}
) {
boolean[] canEdit = new boolean [] {
false, false, false, false
};
public boolean isCellEditable(int rowIndex, int columnIndex) {
return canEdit [columnIndex];
}
});
jTable1.setMaximumSize(new java.awt.Dimension(300, 64));
jTable1.setSelectionMode(javax.swing.ListSelectionModel.SINGLE_SELECTION);
jScrollPane2.setViewportView(jTable1);
jTable1.getColumnModel().getColumn(0).setResizable(false);
jTable1.getColumnModel().getColumn(1).setResizable(false);
jTable1.getColumnModel().getColumn(2).setResizable(false);
jTable1.getColumnModel().getColumn(3).setResizable(false);
jButton1.setText("Register customer");
jButton1.addActionListener(new java.awt.event.ActionListener() {
public void actionPerformed(java.awt.event.ActionEvent evt) {
jButton1ActionPerformed(evt);
}
});
jButton2.setText("Register order");
jButton2.addActionListener(new java.awt.event.ActionListener() {
public void actionPerformed(java.awt.event.ActionEvent evt) {
jButton2ActionPerformed(evt);
}
});
jButton3.setText("Edit customer");
jButton3.addActionListener(new java.awt.event.ActionListener() {
public void actionPerformed(java.awt.event.ActionEvent evt) {
jButton3ActionPerformed(evt);
}
});
javax.swing.GroupLayout jPanel1Layout = new javax.swing.GroupLayout(jPanel1);
jPanel1.setLayout(jPanel1Layout);
jPanel1Layout.setHorizontalGroup(
jPanel1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(jPanel1Layout.createSequentialGroup()
.addGroup(jPanel1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(jPanel1Layout.createSequentialGroup()
.addContainerGap()
.addComponent(jButton1)
.addGap(60, 60, 60)
.addComponent(jButton3)
.addGap(58, 58, 58)
.addComponent(jButton2, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
.addGap(18, 18, 18))
.addComponent(jScrollPane2))
.addGap(0, 0, 0))
);
jPanel1Layout.setVerticalGroup(
jPanel1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(jPanel1Layout.createSequentialGroup()
.addContainerGap()
.addComponent(jScrollPane2, javax.swing.GroupLayout.PREFERRED_SIZE, 186, javax.swing.GroupLayout.PREFERRED_SIZE)
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
.addGroup(jPanel1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
.addComponent(jButton1)
.addComponent(jButton2)
.addComponent(jButton3))
.addGap(14, 14, 14))
);
jTabbedPane1.addTab("Customers", jPanel1);
jTable2.setModel(new javax.swing.table.DefaultTableModel(
new Object [][] {
},
new String [] {
"Address", "Name", "Phone number"
}
) {
boolean[] canEdit = new boolean [] {
false, false, false
};
public boolean isCellEditable(int rowIndex, int columnIndex) {
return canEdit [columnIndex];
}
});
jScrollPane1.setViewportView(jTable2);
jButton4.setText("Get info");
jButton4.addActionListener(new java.awt.event.ActionListener() {
public void actionPerformed(java.awt.event.ActionEvent evt) {
jButton4ActionPerformed(evt);
}
});
javax.swing.GroupLayout jPanel2Layout = new javax.swing.GroupLayout(jPanel2);
jPanel2.setLayout(jPanel2Layout);
jPanel2Layout.setHorizontalGroup(
jPanel2Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addComponent(jScrollPane1, javax.swing.GroupLayout.DEFAULT_SIZE, 563, Short.MAX_VALUE)
.addGroup(jPanel2Layout.createSequentialGroup()
.addGap(150, 150, 150)
.addComponent(jButton4)
.addGap(150, 150, 150))
);
jPanel2Layout.setVerticalGroup(
jPanel2Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(jPanel2Layout.createSequentialGroup()
.addComponent(jScrollPane1, javax.swing.GroupLayout.DEFAULT_SIZE, 217, Short.MAX_VALUE)
.addGap(0, 0, 0)
.addComponent(jButton4))
);
jTabbedPane1.addTab("Todays tasks", jPanel2);
jTable3.setModel(new javax.swing.table.DefaultTableModel(
new Object [][] {
},
new String [] {
"Customer", "Duration", "Day", "Menu name"
}
));
jScrollPane3.setViewportView(jTable3);
jButton7.setText("Delete subscription");
jButton7.addActionListener(new java.awt.event.ActionListener() {
public void actionPerformed(java.awt.event.ActionEvent evt) {
jButton7ActionPerformed(evt);
}
});
javax.swing.GroupLayout jPanel3Layout = new javax.swing.GroupLayout(jPanel3);
jPanel3.setLayout(jPanel3Layout);
jPanel3Layout.setHorizontalGroup(
jPanel3Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addComponent(jScrollPane3, javax.swing.GroupLayout.DEFAULT_SIZE, 563, Short.MAX_VALUE)
.addGroup(javax.swing.GroupLayout.Alignment.TRAILING, jPanel3Layout.createSequentialGroup()
.addContainerGap(175, Short.MAX_VALUE)
.addComponent(jButton7)
.addGap(175, 175, 175))
);
jPanel3Layout.setVerticalGroup(
jPanel3Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(jPanel3Layout.createSequentialGroup()
.addComponent(jScrollPane3, javax.swing.GroupLayout.DEFAULT_SIZE, 217, Short.MAX_VALUE)
.addGap(0, 0, 0)
.addComponent(jButton7))
);
jTabbedPane1.addTab("Subscriptions", jPanel3);
jButton6.setText("Exit");
jButton6.addActionListener(new java.awt.event.ActionListener() {
public void actionPerformed(java.awt.event.ActionEvent evt) {
jButton6ActionPerformed(evt);
}
});
jLabel2.setText(TodaysDate.getDate());
jButton8.setText("Statistics");
jButton8.addActionListener(new java.awt.event.ActionListener() {
public void actionPerformed(java.awt.event.ActionEvent evt) {
jButton8ActionPerformed(evt);
}
});
jButton9.setText("Change password");
jButton9.addActionListener(new java.awt.event.ActionListener() {
public void actionPerformed(java.awt.event.ActionEvent evt) {
jButton9ActionPerformed(evt);
}
});
javax.swing.GroupLayout layout = new javax.swing.GroupLayout(this);
this.setLayout(layout);
layout.setHorizontalGroup(
layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(layout.createSequentialGroup()
.addContainerGap()
.addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(layout.createSequentialGroup()
.addComponent(jCheckBox1)
.addGap(18, 18, 18)
.addComponent(jButton5)
.addGap(18, 18, 18)
.addComponent(jButton8)
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
.addComponent(jLabel1)
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.UNRELATED)
.addComponent(jTextField1, javax.swing.GroupLayout.PREFERRED_SIZE, 120, javax.swing.GroupLayout.PREFERRED_SIZE))
.addGroup(layout.createSequentialGroup()
.addComponent(jTabbedPane1, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
.addGap(0, 0, Short.MAX_VALUE))
.addGroup(layout.createSequentialGroup()
.addComponent(jButton6)
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
.addComponent(jButton9)
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
.addComponent(jLabel2, javax.swing.GroupLayout.PREFERRED_SIZE, 120, javax.swing.GroupLayout.PREFERRED_SIZE)))
.addContainerGap())
);
layout.setVerticalGroup(
layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(layout.createSequentialGroup()
.addContainerGap()
.addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
.addComponent(jTextField1, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
.addComponent(jLabel1)
.addComponent(jCheckBox1)
.addComponent(jButton5)
.addComponent(jButton8))
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.UNRELATED)
.addComponent(jTabbedPane1)
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.UNRELATED)
.addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
.addComponent(jButton6)
.addComponent(jLabel2)
.addComponent(jButton9))
.addContainerGap())
);
}// </editor-fold>//GEN-END:initComponents
| private void initComponents() {
jTextField1 = new javax.swing.JTextField();
jLabel1 = new javax.swing.JLabel();
jCheckBox1 = new javax.swing.JCheckBox();
jButton5 = new javax.swing.JButton();
jTabbedPane1 = new javax.swing.JTabbedPane();
jPanel1 = new javax.swing.JPanel();
jScrollPane2 = new javax.swing.JScrollPane();
jTable1 = new javax.swing.JTable();
jButton1 = new javax.swing.JButton();
jButton2 = new javax.swing.JButton();
jButton3 = new javax.swing.JButton();
jPanel2 = new javax.swing.JPanel();
jScrollPane1 = new javax.swing.JScrollPane();
jTable2 = new javax.swing.JTable();
jButton4 = new javax.swing.JButton();
jPanel3 = new javax.swing.JPanel();
jScrollPane3 = new javax.swing.JScrollPane();
jTable3 = new javax.swing.JTable();
jButton7 = new javax.swing.JButton();
jButton6 = new javax.swing.JButton();
jLabel2 = new javax.swing.JLabel();
jButton8 = new javax.swing.JButton();
jButton9 = new javax.swing.JButton();
jLabel1.setText("Search:");
jCheckBox1.setText("Show only inactive");
jButton5.setText("Storage");
jButton5.setToolTipText("");
jButton5.addActionListener(new java.awt.event.ActionListener() {
public void actionPerformed(java.awt.event.ActionEvent evt) {
jButton5ActionPerformed(evt);
}
});
jTable1.setModel(new javax.swing.table.DefaultTableModel(
new Object [][] {
},
new String [] {
"Customer_id", "Customer name", "Phone", "Business"
}
) {
boolean[] canEdit = new boolean [] {
false, false, false, false
};
public boolean isCellEditable(int rowIndex, int columnIndex) {
return canEdit [columnIndex];
}
});
jTable1.setMaximumSize(new java.awt.Dimension(300, 64));
jTable1.setSelectionMode(javax.swing.ListSelectionModel.SINGLE_SELECTION);
jTable1.getTableHeader().setReorderingAllowed(false);
jScrollPane2.setViewportView(jTable1);
jTable1.getColumnModel().getColumn(0).setResizable(false);
jTable1.getColumnModel().getColumn(1).setResizable(false);
jTable1.getColumnModel().getColumn(2).setResizable(false);
jTable1.getColumnModel().getColumn(3).setResizable(false);
jButton1.setText("Register customer");
jButton1.addActionListener(new java.awt.event.ActionListener() {
public void actionPerformed(java.awt.event.ActionEvent evt) {
jButton1ActionPerformed(evt);
}
});
jButton2.setText("Register order");
jButton2.addActionListener(new java.awt.event.ActionListener() {
public void actionPerformed(java.awt.event.ActionEvent evt) {
jButton2ActionPerformed(evt);
}
});
jButton3.setText("Edit customer");
jButton3.addActionListener(new java.awt.event.ActionListener() {
public void actionPerformed(java.awt.event.ActionEvent evt) {
jButton3ActionPerformed(evt);
}
});
javax.swing.GroupLayout jPanel1Layout = new javax.swing.GroupLayout(jPanel1);
jPanel1.setLayout(jPanel1Layout);
jPanel1Layout.setHorizontalGroup(
jPanel1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(jPanel1Layout.createSequentialGroup()
.addGroup(jPanel1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(jPanel1Layout.createSequentialGroup()
.addContainerGap()
.addComponent(jButton1)
.addGap(60, 60, 60)
.addComponent(jButton3)
.addGap(58, 58, 58)
.addComponent(jButton2, javax.swing.GroupLayout.DEFAULT_SIZE, 197, Short.MAX_VALUE)
.addGap(18, 18, 18))
.addComponent(jScrollPane2))
.addGap(0, 0, 0))
);
jPanel1Layout.setVerticalGroup(
jPanel1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(jPanel1Layout.createSequentialGroup()
.addContainerGap()
.addComponent(jScrollPane2, javax.swing.GroupLayout.PREFERRED_SIZE, 186, javax.swing.GroupLayout.PREFERRED_SIZE)
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
.addGroup(jPanel1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
.addComponent(jButton1)
.addComponent(jButton2)
.addComponent(jButton3))
.addGap(14, 14, 14))
);
jTabbedPane1.addTab("Customers", jPanel1);
jTable2.setModel(new javax.swing.table.DefaultTableModel(
new Object [][] {
},
new String [] {
"Address", "Name", "Phone number"
}
) {
boolean[] canEdit = new boolean [] {
false, false, false
};
public boolean isCellEditable(int rowIndex, int columnIndex) {
return canEdit [columnIndex];
}
});
jTable2.getTableHeader().setReorderingAllowed(false);
jScrollPane1.setViewportView(jTable2);
jButton4.setText("Get info");
jButton4.addActionListener(new java.awt.event.ActionListener() {
public void actionPerformed(java.awt.event.ActionEvent evt) {
jButton4ActionPerformed(evt);
}
});
javax.swing.GroupLayout jPanel2Layout = new javax.swing.GroupLayout(jPanel2);
jPanel2.setLayout(jPanel2Layout);
jPanel2Layout.setHorizontalGroup(
jPanel2Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addComponent(jScrollPane1, javax.swing.GroupLayout.DEFAULT_SIZE, 563, Short.MAX_VALUE)
.addGroup(jPanel2Layout.createSequentialGroup()
.addGap(150, 150, 150)
.addComponent(jButton4)
.addGap(150, 150, 150))
);
jPanel2Layout.setVerticalGroup(
jPanel2Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(jPanel2Layout.createSequentialGroup()
.addComponent(jScrollPane1, javax.swing.GroupLayout.DEFAULT_SIZE, 217, Short.MAX_VALUE)
.addGap(0, 0, 0)
.addComponent(jButton4))
);
jTabbedPane1.addTab("Todays tasks", jPanel2);
jTable3.setModel(new javax.swing.table.DefaultTableModel(
new Object [][] {
},
new String [] {
"Customer", "Duration", "Day", "Menu name"
}
));
jTable3.getTableHeader().setReorderingAllowed(false);
jScrollPane3.setViewportView(jTable3);
jButton7.setText("Delete subscription");
jButton7.addActionListener(new java.awt.event.ActionListener() {
public void actionPerformed(java.awt.event.ActionEvent evt) {
jButton7ActionPerformed(evt);
}
});
javax.swing.GroupLayout jPanel3Layout = new javax.swing.GroupLayout(jPanel3);
jPanel3.setLayout(jPanel3Layout);
jPanel3Layout.setHorizontalGroup(
jPanel3Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addComponent(jScrollPane3, javax.swing.GroupLayout.DEFAULT_SIZE, 563, Short.MAX_VALUE)
.addGroup(javax.swing.GroupLayout.Alignment.TRAILING, jPanel3Layout.createSequentialGroup()
.addContainerGap(175, Short.MAX_VALUE)
.addComponent(jButton7)
.addGap(175, 175, 175))
);
jPanel3Layout.setVerticalGroup(
jPanel3Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(jPanel3Layout.createSequentialGroup()
.addComponent(jScrollPane3, javax.swing.GroupLayout.DEFAULT_SIZE, 217, Short.MAX_VALUE)
.addGap(0, 0, 0)
.addComponent(jButton7))
);
jTabbedPane1.addTab("Subscriptions", jPanel3);
jButton6.setText("Exit");
jButton6.addActionListener(new java.awt.event.ActionListener() {
public void actionPerformed(java.awt.event.ActionEvent evt) {
jButton6ActionPerformed(evt);
}
});
jLabel2.setText(TodaysDate.getDate());
jButton8.setText("Statistics");
jButton8.addActionListener(new java.awt.event.ActionListener() {
public void actionPerformed(java.awt.event.ActionEvent evt) {
jButton8ActionPerformed(evt);
}
});
jButton9.setText("Change password");
jButton9.addActionListener(new java.awt.event.ActionListener() {
public void actionPerformed(java.awt.event.ActionEvent evt) {
jButton9ActionPerformed(evt);
}
});
javax.swing.GroupLayout layout = new javax.swing.GroupLayout(this);
this.setLayout(layout);
layout.setHorizontalGroup(
layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(layout.createSequentialGroup()
.addContainerGap()
.addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(layout.createSequentialGroup()
.addComponent(jCheckBox1)
.addGap(18, 18, 18)
.addComponent(jButton5)
.addGap(18, 18, 18)
.addComponent(jButton8)
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
.addComponent(jLabel1)
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.UNRELATED)
.addComponent(jTextField1, javax.swing.GroupLayout.PREFERRED_SIZE, 120, javax.swing.GroupLayout.PREFERRED_SIZE))
.addGroup(layout.createSequentialGroup()
.addComponent(jTabbedPane1, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
.addGap(0, 0, Short.MAX_VALUE))
.addGroup(layout.createSequentialGroup()
.addComponent(jButton6)
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
.addComponent(jButton9)
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
.addComponent(jLabel2, javax.swing.GroupLayout.PREFERRED_SIZE, 120, javax.swing.GroupLayout.PREFERRED_SIZE)))
.addContainerGap())
);
layout.setVerticalGroup(
layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(layout.createSequentialGroup()
.addContainerGap()
.addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
.addComponent(jTextField1, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
.addComponent(jLabel1)
.addComponent(jCheckBox1)
.addComponent(jButton5)
.addComponent(jButton8))
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.UNRELATED)
.addComponent(jTabbedPane1)
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.UNRELATED)
.addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
.addComponent(jButton6)
.addComponent(jLabel2)
.addComponent(jButton9))
.addContainerGap())
);
}// </editor-fold>//GEN-END:initComponents
|
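The generated Customers tab above makes its table read-only by overriding isCellEditable on a DefaultTableModel and by locking the table header against column reordering. The sketch below shows the same pattern written by hand, outside the GUI builder; the class name and the sample row are illustrative and do not come from the project.
import javax.swing.JFrame;
import javax.swing.JScrollPane;
import javax.swing.JTable;
import javax.swing.SwingUtilities;
import javax.swing.table.DefaultTableModel;
public class ReadOnlyTableDemo {
    public static void main(String[] args) {
        SwingUtilities.invokeLater(() -> {
            // Model whose cells can never be edited, mirroring the
            // generated canEdit[] override above.
            DefaultTableModel model = new DefaultTableModel(
                    new Object[][] { { "1 Main St", "Alice", "555-0100" } }, // sample data
                    new String[] { "Address", "Name", "Phone number" }) {
                @Override
                public boolean isCellEditable(int row, int column) {
                    return false; // read-only grid
                }
            };
            JTable table = new JTable(model);
            table.getTableHeader().setReorderingAllowed(false); // fixed column order
            JFrame frame = new JFrame("Customers");
            frame.setDefaultCloseOperation(JFrame.DISPOSE_ON_CLOSE);
            frame.add(new JScrollPane(table));
            frame.pack();
            frame.setVisible(true);
        });
    }
}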
diff --git a/src/ox/stackgame/ui/ProgramTextUI.java b/src/ox/stackgame/ui/ProgramTextUI.java
index afe7bfb..2bced12 100644
--- a/src/ox/stackgame/ui/ProgramTextUI.java
+++ b/src/ox/stackgame/ui/ProgramTextUI.java
@@ -1,231 +1,231 @@
/**
*
*/
package ox.stackgame.ui;
import java.util.ArrayList;
import java.util.List;
import java.awt.*;
import javax.swing.*;
import javax.swing.border.EmptyBorder;
import javax.swing.event.*;
import javax.swing.text.*;
import ox.stackgame.stackmachine.*;
import ox.stackgame.stackmachine.Lexer.LexerException;
import ox.stackgame.stackmachine.instructions.*;
/**
* Allows the user to input text while in Challenge/FreeDesignMode. Also
* displays control buttons. When the user presses 'Play', the plain text is
* fed through a lexer, which updates the StackMachine. The frame is not
* editable in RunMode, but displays the current program counter.
*
* @author danfox
*
*/
@SuppressWarnings("serial")
public class ProgramTextUI extends JLayeredPane {
public static Font font = new Font(Font.MONOSPACED, Font.PLAIN, 15);
public static Color editableTextColor = new Color(186, 96, 96);
public static Color frozenTextColor = new Color(222, 147, 95);
private final JTextArea jta = new JTextArea();
public final Document document = jta.getDocument();
private Highlighter highlighter;
private boolean dirtyText = true;
private final StateManager sm;
private ErrorUI eui;
private ModeVisitor modeActivationVisitor = new ModeVisitor() {
public void visit(RunMode m) { }
public void visit(ChallengeMode m) {
jta.setEditable(true);
jta.setForeground(editableTextColor);
ProgramTextUI.this.antiLex(sm.stackMachine.getInstructions());
}
public void visit(FreeDesignMode m) {
jta.setEditable(true);
jta.setForeground(editableTextColor);
ProgramTextUI.this.antiLex(sm.stackMachine.getInstructions());
}
};
private ModeVisitor modeDeactivationVisitor = new ModeVisitor() {
public void visit(RunMode m) {
highlighter.removeAllHighlights();
}
public void visit(ChallengeMode m) {
jta.setEditable(false);
jta.setForeground(frozenTextColor);
}
public void visit(FreeDesignMode m) {
jta.setEditable(false);
jta.setForeground(frozenTextColor);
}
};
private StackMachineListener l = new StackMachineListenerAdapter() {
public void programCounterChanged(int line, Instruction instruction) {
highlight(instruction.line == -1 ? line - 1 : instruction.line);
}
};
/**
*
* @param stateManager
* @param runMode
* @param eui Output stream for errors
*/
public ProgramTextUI(final StateManager stateManager, final RunMode runMode, ErrorUI eui) {
super();
this.eui = eui;
this.sm = stateManager;
stateManager.registerModeActivationVisitor(modeActivationVisitor);
stateManager.registerModeDeactivationVisitor(modeDeactivationVisitor);
stateManager.stackMachine.addListener(l);
this.add(createScrollPane(), new Integer(0)); // fills container
jta.setText(antiLex(stateManager.stackMachine.getInstructions()));
}
private void highlight(int line) {
try {
highlighter.removeAllHighlights();
highlighter.addHighlight(jta.getLineStartOffset(line), jta.getLineEndOffset(line), new DefaultHighlighter.DefaultHighlightPainter(
new Color(129, 162, 190)));
} catch (BadLocationException e) {
throw new RuntimeException("pc shouldn't be out of bounds");
}
}
public boolean isTextDirty() {
return dirtyText;
}
private void redHighlight(int line, int start, int end) {
try {
highlighter.removeAllHighlights();
highlighter.addHighlight(jta.getLineStartOffset(line) + start, jta.getLineStartOffset(line) + end,
new DefaultHighlighter.DefaultHighlightPainter(new Color(150, 30, 30)));
} catch (BadLocationException e) {
throw new RuntimeException("pc shouldn't be out of bounds");
}
}
private JScrollPane createScrollPane() {
// create a scroll pane
JScrollPane jsp = new JScrollPane();
jsp.setBounds(0, 0, ApplicationFrame.CENTER_PANEL_WIDTH, ApplicationFrame.PROGRAMTEXTUI_HEIGHT);
jsp.setBorder(new EmptyBorder(5, 0, 5, 0));
jsp.setBackground(ApplicationFrame.caBlue);
// jsp.setVerticalScrollBarPolicy(JScrollPane.VERTICAL_SCROLLBAR_ALWAYS);
// create an editable textarea
jta.setBackground(ApplicationFrame.caBlue);
jta.setForeground(editableTextColor);
jta.setMargin(new Insets(0, 20, 0, 20)); // compensates for the
// height of the stackUI
jta.setFont(font);
jta.setCaretColor(new Color(150, 150, 150));
highlighter = jta.getHighlighter();
// create textarea to display linenumbers
final JTextArea lines = new JTextArea("1");
lines.setMargin(new Insets(0, 20, 0, 0));
lines.setForeground(new Color(150, 150, 150));
lines.setBackground(ApplicationFrame.caBlue);
lines.setFont(font);
lines.setEditable(false);
// listen for changes in jta and update linenumbers
jta.getDocument().addDocumentListener(new DocumentListener() {
public String getLinesText() {
int caretPosition = jta.getDocument().getLength();
Element root = jta.getDocument().getDefaultRootElement();
StringBuilder sb = new StringBuilder();
- sb.append( "1" + System.getProperty("line.separator") );
+ sb.append( "1" );
for (int i = 2; i < root.getElementIndex(caretPosition) + 2; i++) {
- sb.append( i + System.getProperty("line.separator") );
+ sb.append( System.getProperty("line.separator") + i );
}
return sb.toString();
}
private void textChanged() {
highlighter.removeAllHighlights();
dirtyText = true;
}
public void changedUpdate(DocumentEvent de) {
textChanged();
lines.setText(getLinesText());
}
public void insertUpdate(DocumentEvent de) {
textChanged();
lines.setText(getLinesText());
}
public void removeUpdate(DocumentEvent de) {
textChanged();
lines.setText(getLinesText());
}
});
// place the textarea in the scrollable window
jsp.getViewport().add(jta);
jsp.setRowHeaderView(lines); // keeps the line numbers in sync.
return jsp;
}
public List<Instruction> getProgram() {
List<Instruction> program = lex(jta.getText());
return program != null ? program : new ArrayList<Instruction>();
}
private List<Instruction> lex(String text) {
ArrayList<Instruction> p = new ArrayList<Instruction>();
try {
p = Lexer.lex(text);
eui.clearErrors();
dirtyText = false;
System.out.println("Dirty text false: lexed successfully.");
} catch (LexerException e) {
redHighlight(e.lineNumber, e.wordStart, e.wordEnd);
eui.displayError("Lexer Error on line " + (e.lineNumber + 1) + ": " + e.getMessage());
System.err.println("Lexer error on line " + (e.lineNumber + 1) + ": " + e.getMessage());
}
return p;
}
private String antiLex(List<Instruction> program) {
StringBuilder b = new StringBuilder();
for (Instruction instr : program) {
if (b.length() != 0)
b.append("\n");
b.append(instr.name);
if (instr.arg != null)
b.append(" " + instr.arg.getValue());
}
dirtyText = false;
return b.toString();
}
}
| false | true | private JScrollPane createScrollPane() {
// create a scroll pane
JScrollPane jsp = new JScrollPane();
jsp.setBounds(0, 0, ApplicationFrame.CENTER_PANEL_WIDTH, ApplicationFrame.PROGRAMTEXTUI_HEIGHT);
jsp.setBorder(new EmptyBorder(5, 0, 5, 0));
jsp.setBackground(ApplicationFrame.caBlue);
// jsp.setVerticalScrollBarPolicy(JScrollPane.VERTICAL_SCROLLBAR_ALWAYS);
// create an editable textarea
jta.setBackground(ApplicationFrame.caBlue);
jta.setForeground(editableTextColor);
jta.setMargin(new Insets(0, 20, 0, 20)); // compensates for the
// height of the stackUI
jta.setFont(font);
jta.setCaretColor(new Color(150, 150, 150));
highlighter = jta.getHighlighter();
// create textarea to display linenumbers
final JTextArea lines = new JTextArea("1");
lines.setMargin(new Insets(0, 20, 0, 0));
lines.setForeground(new Color(150, 150, 150));
lines.setBackground(ApplicationFrame.caBlue);
lines.setFont(font);
lines.setEditable(false);
// listen for changes in jta and update linenumbers
jta.getDocument().addDocumentListener(new DocumentListener() {
public String getLinesText() {
int caretPosition = jta.getDocument().getLength();
Element root = jta.getDocument().getDefaultRootElement();
StringBuilder sb = new StringBuilder();
sb.append( "1" + System.getProperty("line.separator") );
for (int i = 2; i < root.getElementIndex(caretPosition) + 2; i++) {
sb.append( i + System.getProperty("line.separator") );
}
return sb.toString();
}
private void textChanged() {
highlighter.removeAllHighlights();
dirtyText = true;
}
public void changedUpdate(DocumentEvent de) {
textChanged();
lines.setText(getLinesText());
}
public void insertUpdate(DocumentEvent de) {
textChanged();
lines.setText(getLinesText());
}
public void removeUpdate(DocumentEvent de) {
textChanged();
lines.setText(getLinesText());
}
});
// place the textarea in the scrollable window
jsp.getViewport().add(jta);
jsp.setRowHeaderView(lines); // keeps the line numbers in sync.
return jsp;
}
| private JScrollPane createScrollPane() {
// create a scroll pane
JScrollPane jsp = new JScrollPane();
jsp.setBounds(0, 0, ApplicationFrame.CENTER_PANEL_WIDTH, ApplicationFrame.PROGRAMTEXTUI_HEIGHT);
jsp.setBorder(new EmptyBorder(5, 0, 5, 0));
jsp.setBackground(ApplicationFrame.caBlue);
// jsp.setVerticalScrollBarPolicy(JScrollPane.VERTICAL_SCROLLBAR_ALWAYS);
// create an editable textarea
jta.setBackground(ApplicationFrame.caBlue);
jta.setForeground(editableTextColor);
jta.setMargin(new Insets(0, 20, 0, 20)); // compensates for the
// height of the stackUI
jta.setFont(font);
jta.setCaretColor(new Color(150, 150, 150));
highlighter = jta.getHighlighter();
// create textarea to display linenumbers
final JTextArea lines = new JTextArea("1");
lines.setMargin(new Insets(0, 20, 0, 0));
lines.setForeground(new Color(150, 150, 150));
lines.setBackground(ApplicationFrame.caBlue);
lines.setFont(font);
lines.setEditable(false);
// listen for changes in jta and update linenumbers
jta.getDocument().addDocumentListener(new DocumentListener() {
public String getLinesText() {
int caretPosition = jta.getDocument().getLength();
Element root = jta.getDocument().getDefaultRootElement();
StringBuilder sb = new StringBuilder();
sb.append( "1" );
for (int i = 2; i < root.getElementIndex(caretPosition) + 2; i++) {
sb.append( System.getProperty("line.separator") + i );
}
return sb.toString();
}
private void textChanged() {
highlighter.removeAllHighlights();
dirtyText = true;
}
public void changedUpdate(DocumentEvent de) {
textChanged();
lines.setText(getLinesText());
}
public void insertUpdate(DocumentEvent de) {
textChanged();
lines.setText(getLinesText());
}
public void removeUpdate(DocumentEvent de) {
textChanged();
lines.setText(getLinesText());
}
});
// place the textarea in the scrollable window
jsp.getViewport().add(jta);
jsp.setRowHeaderView(lines); // keeps the line numbers in sync.
return jsp;
}
|
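The only functional change in the ProgramTextUI diff above moves the line separator from after each line number to before it, so the gutter string no longer carries a trailing separator that left the row header one line taller than the text area. A minimal sketch of the corrected logic follows; buildLineNumbers is an illustrative name, not a method of the project.
public class LineNumberGutter {
    // Emit "1" first, then prepend the separator to every later number,
    // so the result never ends with a dangling newline.
    static String buildLineNumbers(int lineCount) {
        StringBuilder sb = new StringBuilder("1");
        String sep = System.lineSeparator();
        for (int i = 2; i <= lineCount; i++) {
            sb.append(sep).append(i); // separator before, not after
        }
        return sb.toString();
    }
    public static void main(String[] args) {
        System.out.println(buildLineNumbers(3)); // prints 1, 2, 3 on three lines
    }
}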
diff --git a/src/au/gov/naa/digipres/xena/plugin/html/HtmlView.java b/src/au/gov/naa/digipres/xena/plugin/html/HtmlView.java
index 90b91e0f..888647bd 100644
--- a/src/au/gov/naa/digipres/xena/plugin/html/HtmlView.java
+++ b/src/au/gov/naa/digipres/xena/plugin/html/HtmlView.java
@@ -1,136 +1,136 @@
package au.gov.naa.digipres.xena.plugin.html;
import java.awt.BorderLayout;
import java.awt.event.ActionEvent;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import javax.swing.JButton;
import javax.swing.JEditorPane;
import javax.swing.JOptionPane;
import javax.swing.JScrollPane;
import javax.swing.text.html.HTMLEditorKit;
import org.jdom.output.Format;
import org.jdom.output.XMLOutputter;
import au.gov.naa.digipres.xena.kernel.PrintXml;
import au.gov.naa.digipres.xena.kernel.XenaException;
import au.gov.naa.digipres.xena.util.JdomXenaView;
/**
* View to display HTML. We use the Java internal HTML widget to display the HTML,
* but the Java HTML viewer is pathetic, so we provide a button
* to open it in an external browser.
*
* @author Chris Bitmead
*/
public class HtmlView extends JdomXenaView {
JScrollPane scrollPane = new JScrollPane();
HTMLEditorKit htmlKit = new HTMLEditorKit();
JEditorPane ep = new JEditorPane();
private JButton externalButton = new JButton();
public HtmlView() {
try {
jbInit();
} catch (Exception e) {
e.printStackTrace();
}
}
public String getViewName() {
return "HTML View";
}
public void initListeners() {
}
public boolean canShowTag(String tag) throws XenaException {
return tag.equals(viewManager.getPluginManager().getTypeManager().lookupXenaFileType(XenaHtmlFileType.class).getTag());
}
public void updateViewFromElement() throws XenaException {
try {
ByteArrayOutputStream os = new ByteArrayOutputStream();
new HackPrintXml().printXml(getElement(), os);
ByteArrayInputStream in = new ByteArrayInputStream(os.toByteArray());
try {
htmlKit.read(in, ep.getDocument(), 0);
} catch (Exception x) {
// Sometimes weird HTML freaks it out.
throw new XenaException(x);
}
ep.setCaretPosition(0);
} catch (IOException e) {
throw new XenaException(e);
}
}
private void jbInit() throws Exception {
ep.setEditorKit(htmlKit);
ep.setContentType("text/html; charset=" + PrintXml.singleton().ENCODING);
ep.getDocument().putProperty("IgnoreCharsetDirective", new Boolean(true));
scrollPane.getViewport().add(ep);
this.add(scrollPane, BorderLayout.CENTER);
externalButton.setToolTipText("");
externalButton.setText("Show in Browser Window");
externalButton.addActionListener(
new java.awt.event.ActionListener() {
public void actionPerformed(ActionEvent e) {
externalButton_actionPerformed(e);
}
});
this.add(externalButton, BorderLayout.NORTH);
}
/**
* This fixes a bug in Internet Explorer's rendering; specifically,
* it allows META REFRESH to work.
*/
public static class HackPrintXml extends PrintXml {
public Format getFormatter() {
Format format = super.getFormatter();
format.setExpandEmptyElements(true);
return format;
}
}
void externalButton_actionPerformed(ActionEvent e) {
File output = null;
try {
output = File.createTempFile("output", ".html");
output.deleteOnExit();
String ENCODING = "UTF-8";
OutputStream os = new FileOutputStream(output);
XMLOutputter outputter = new XMLOutputter(Format.getPrettyFormat());
OutputStreamWriter out = new OutputStreamWriter(os, ENCODING);
outputter.output(getElement(), out);
out.close();
os.close();
BrowserLauncher.openURL(output.toURL().toString());
} catch (Exception ex) {
JOptionPane.showMessageDialog(this, ex);
} finally {
// If we don't sleep the file disappears before the browser
// has time to start.
try {
- Thread.currentThread().sleep(5000);
+ Thread.sleep(5000);
} catch (InterruptedException ex) {
// Nothing.
}
if (output != null) {
output.delete();
}
}
}
}
| true | true | void externalButton_actionPerformed(ActionEvent e) {
File output = null;
try {
output = File.createTempFile("output", ".html");
output.deleteOnExit();
String ENCODING = "UTF-8";
OutputStream os = new FileOutputStream(output);
XMLOutputter outputter = new XMLOutputter(Format.getPrettyFormat());
OutputStreamWriter out = new OutputStreamWriter(os, ENCODING);
outputter.output(getElement(), out);
out.close();
os.close();
BrowserLauncher.openURL(output.toURL().toString());
} catch (Exception ex) {
JOptionPane.showMessageDialog(this, ex);
} finally {
// If we don't sleep the file disappears before the browser
// has time to start.
try {
Thread.currentThread().sleep(5000);
} catch (InterruptedException ex) {
// Nothing.
}
if (output != null) {
output.delete();
}
}
}
| void externalButton_actionPerformed(ActionEvent e) {
File output = null;
try {
output = File.createTempFile("output", ".html");
output.deleteOnExit();
String ENCODING = "UTF-8";
OutputStream os = new FileOutputStream(output);
XMLOutputter outputter = new XMLOutputter(Format.getPrettyFormat());
OutputStreamWriter out = new OutputStreamWriter(os, ENCODING);
outputter.output(getElement(), out);
out.close();
os.close();
BrowserLauncher.openURL(output.toURL().toString());
} catch (Exception ex) {
JOptionPane.showMessageDialog(this, ex);
} finally {
// If we don't sleep the file disappears before the browser
// has time to start.
try {
Thread.sleep(5000);
} catch (InterruptedException ex) {
// Nothing.
}
if (output != null) {
output.delete();
}
}
}
|
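The HtmlView fix above is the classic cleanup of a static method called through an instance: Thread.sleep is static and always pauses the thread that calls it, so Thread.currentThread().sleep(5000) behaves identically but misleadingly suggests the receiver matters. A small illustration of the pitfall; the worker thread here is purely illustrative.
public class SleepPitfall {
    public static void main(String[] args) throws InterruptedException {
        Thread worker = new Thread(() -> System.out.println("worker running"));
        worker.start();
        // Misleading: this would compile, but it pauses *main*, not 'worker',
        // because sleep() is static. Writing it this way hides that fact:
        // worker.sleep(1000);
        // Clear: the static call makes the paused thread unambiguous.
        Thread.sleep(1000);
        worker.join();
    }
}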
diff --git a/h2/src/test/org/h2/test/TestBase.java b/h2/src/test/org/h2/test/TestBase.java
index e6fb0f57f..43d763a92 100644
--- a/h2/src/test/org/h2/test/TestBase.java
+++ b/h2/src/test/org/h2/test/TestBase.java
@@ -1,1485 +1,1485 @@
/*
* Copyright 2004-2013 H2 Group. Multiple-Licensed under the H2 License,
* Version 1.0, and under the Eclipse Public License, Version 1.0
* (http://h2database.com/html/license.html).
* Initial Developer: H2 Group
*/
package org.h2.test;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.io.InputStream;
import java.io.PrintWriter;
import java.io.Reader;
import java.lang.reflect.Constructor;
import java.lang.reflect.InvocationHandler;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
import java.lang.reflect.Proxy;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.sql.Statement;
import java.sql.Types;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.LinkedList;
import java.util.SimpleTimeZone;
import org.h2.jdbc.JdbcConnection;
import org.h2.message.DbException;
import org.h2.message.TraceSystem;
import org.h2.store.FileLock;
import org.h2.store.fs.FileUtils;
import org.h2.test.utils.ProxyCodeGenerator;
import org.h2.test.utils.ResultVerifier;
import org.h2.tools.DeleteDbFiles;
/**
* The base class for all tests.
*/
public abstract class TestBase {
/**
* The base directory.
*/
public static final String BASE_TEST_DIR = "data";
/**
* The temporary directory.
*/
protected static final String TEMP_DIR = "data/temp";
/**
* An id used to create unique file names.
*/
protected static int uniqueId;
/**
* The base directory to write test databases.
*/
private static String baseDir = getTestDir("");
/**
* The last time something was printed.
*/
private static long lastPrint;
/**
* The test configuration.
*/
public TestAll config;
/**
* The time when the test was started.
*/
protected long start;
private final LinkedList<byte[]> memory = new LinkedList<byte[]>();
/**
* Get the test directory for this test.
*
* @param name the directory name suffix
* @return the test directory
*/
public static String getTestDir(String name) {
return BASE_TEST_DIR + "/test" + name;
}
/**
* Start the TCP server if enabled in the configuration.
*/
protected void startServerIfRequired() throws SQLException {
config.beforeTest();
}
/**
* Initialize the test configuration using the default settings.
*
* @return itself
*/
public TestBase init() throws Exception {
return init(new TestAll());
}
/**
* Initialize the test configuration.
*
* @param conf the configuration
* @return itself
*/
public TestBase init(TestAll conf) throws Exception {
baseDir = getTestDir("");
System.setProperty("java.io.tmpdir", TEMP_DIR);
this.config = conf;
return this;
}
/**
* Run a test case using the given seed value.
*
* @param seed the random seed value
*/
public void testCase(int seed) throws Exception {
// do nothing
}
/**
* This method initializes the test, runs it by calling the test()
* method, and prints status information. It also catches exceptions so that
* the tests can continue.
*
* @param conf the test configuration
*/
public void runTest(TestAll conf) {
try {
init(conf);
start = System.currentTimeMillis();
test();
println("");
} catch (Throwable e) {
println("FAIL " + e.toString());
logError("FAIL " + e.toString(), e);
if (config.stopOnError) {
throw new AssertionError("ERROR");
}
if (e instanceof OutOfMemoryError) {
throw (OutOfMemoryError) e;
}
} finally {
try {
FileUtils.deleteRecursive("memFS:", false);
FileUtils.deleteRecursive("nioMemFS:", false);
FileUtils.deleteRecursive("memLZF:", false);
FileUtils.deleteRecursive("nioMemLZF:", false);
} catch (RuntimeException e) {
e.printStackTrace();
}
}
}
/**
* Open a database connection in admin mode. The default user name and
* password is used.
*
* @param name the database name
* @return the connection
*/
public Connection getConnection(String name) throws SQLException {
return getConnectionInternal(getURL(name, true), getUser(), getPassword());
}
/**
* Open a database connection.
*
* @param name the database name
* @param user the user name to use
* @param password the password to use
* @return the connection
*/
public Connection getConnection(String name, String user, String password) throws SQLException {
return getConnectionInternal(getURL(name, false), user, password);
}
/**
* Get the password to use to login for the given user password. The file
* password is added if required.
*
* @param userPassword the password of this user
* @return the login password
*/
protected String getPassword(String userPassword) {
return config == null || config.cipher == null ? userPassword : getFilePassword() + " " + userPassword;
}
/**
* Get the file password (only required if file encryption is used).
*
* @return the file password
*/
protected String getFilePassword() {
return "filePassword";
}
/**
* Get the login password. This is usually the user password. If file
* encryption is used it is combined with the file password.
*
* @return the login password
*/
protected String getPassword() {
return getPassword("123");
}
/**
* Get the base directory for tests.
* If a special file system is used, the prefix is prepended.
*
* @return the directory, possibly including file system prefix
*/
public String getBaseDir() {
String dir = baseDir;
if (config != null) {
if (config.reopen) {
dir = "rec:memFS:" + dir;
}
if (config.splitFileSystem) {
dir = "split:16:" + dir;
}
}
// return "split:nioMapped:" + baseDir;
return dir;
}
/**
* Get the database URL for the given database name using the current
* configuration options.
*
* @param name the database name
* @param admin true if the current user is an admin
* @return the database URL
*/
protected String getURL(String name, boolean admin) {
String url;
if (name.startsWith("jdbc:")) {
if (config.mvStore) {
name = addOption(name, "MV_STORE", "true");
}
return name;
}
if (config.memory) {
name = "mem:" + name;
} else {
int idx = name.indexOf(':');
if (idx < 0 || idx > 10) {
// index > 10 if in options
name = getBaseDir() + "/" + name;
}
}
if (config.networked) {
if (config.ssl) {
url = "ssl://localhost:9192/" + name;
} else {
url = "tcp://localhost:9192/" + name;
}
} else if (config.googleAppEngine) {
url = "gae://" + name + ";FILE_LOCK=NO;AUTO_SERVER=FALSE;DB_CLOSE_ON_EXIT=FALSE";
} else {
url = name;
}
if (config.mvStore) {
- url = addOption(name, "MV_STORE", "true");
+ url = addOption(url, "MV_STORE", "true");
}
if (!config.memory) {
if (config.smallLog && admin) {
url = addOption(url, "MAX_LOG_SIZE", "1");
}
}
if (config.traceSystemOut) {
url = addOption(url, "TRACE_LEVEL_SYSTEM_OUT", "2");
}
if (config.traceLevelFile > 0 && admin) {
url = addOption(url, "TRACE_LEVEL_FILE", "" + config.traceLevelFile);
url = addOption(url, "TRACE_MAX_FILE_SIZE", "8");
}
url = addOption(url, "LOG", "1");
if (config.throttleDefault > 0) {
url = addOption(url, "THROTTLE", "" + config.throttleDefault);
} else if (config.throttle > 0) {
url = addOption(url, "THROTTLE", "" + config.throttle);
}
url = addOption(url, "LOCK_TIMEOUT", "50");
if (config.diskUndo && admin) {
url = addOption(url, "MAX_MEMORY_UNDO", "3");
}
if (config.big && admin) {
// force operations to disk
url = addOption(url, "MAX_OPERATION_MEMORY", "1");
}
if (config.mvcc) {
url = addOption(url, "MVCC", "TRUE");
}
if (config.cacheType != null && admin) {
url = addOption(url, "CACHE_TYPE", config.cacheType);
}
if (config.diskResult && admin) {
url = addOption(url, "MAX_MEMORY_ROWS", "100");
url = addOption(url, "CACHE_SIZE", "0");
}
if (config.cipher != null) {
url = addOption(url, "CIPHER", config.cipher);
}
if (config.defrag) {
url = addOption(url, "DEFRAG_ALWAYS", "TRUE");
}
if (config.nestedJoins) {
url = addOption(url, "NESTED_JOINS", "TRUE");
}
return "jdbc:h2:" + url;
}
private static String addOption(String url, String option, String value) {
if (url.indexOf(";" + option + "=") < 0) {
url += ";" + option + "=" + value;
}
return url;
}
private static Connection getConnectionInternal(String url, String user, String password) throws SQLException {
org.h2.Driver.load();
// url += ";DEFAULT_TABLE_TYPE=1";
// Class.forName("org.hsqldb.jdbcDriver");
// return DriverManager.getConnection("jdbc:hsqldb:" + name, "sa", "");
return DriverManager.getConnection(url, user, password);
}
/**
* Get the small or the big value depending on the configuration.
*
* @param small the value to return if the current test mode is 'small'
* @param big the value to return if the current test mode is 'big'
* @return small or big, depending on the configuration
*/
protected int getSize(int small, int big) {
return config.endless ? Integer.MAX_VALUE : config.big ? big : small;
}
protected String getUser() {
return "sa";
}
/**
* Write a message to system out if trace is enabled.
*
* @param x the value to write
*/
protected void trace(int x) {
trace("" + x);
}
/**
* Write a message to system out if trace is enabled.
*
* @param s the message to write
*/
public void trace(String s) {
if (config.traceTest) {
lastPrint = 0;
println(s);
}
}
/**
* Print how much memory is currently used.
*/
protected void traceMemory() {
if (config.traceTest) {
trace("mem=" + getMemoryUsed());
}
}
/**
* Print the currently used memory, the message and the given time in
* milliseconds.
*
* @param s the message
* @param time the time in millis
*/
public void printTimeMemory(String s, long time) {
if (config.big) {
println(getMemoryUsed() + " MB: " + s + " ms: " + time);
}
}
/**
* Get the number of megabytes heap memory in use.
*
* @return the used megabytes
*/
public static int getMemoryUsed() {
return (int) (getMemoryUsedBytes() / 1024 / 1024);
}
/**
* Get the number of bytes heap memory in use.
*
* @return the used bytes
*/
public static long getMemoryUsedBytes() {
Runtime rt = Runtime.getRuntime();
long memory = Long.MAX_VALUE;
for (int i = 0; i < 8; i++) {
rt.gc();
long memNow = rt.totalMemory() - rt.freeMemory();
if (memNow >= memory) {
break;
}
memory = memNow;
}
return memory;
}
/**
* Called if the test reached a point that was not expected.
*
* @throws AssertionError always throws an AssertionError
*/
public void fail() {
fail("Failure");
}
/**
* Called if the test reached a point that was not expected.
*
* @param string the error message
* @throws AssertionError always throws an AssertionError
*/
protected void fail(String string) {
lastPrint = 0;
println(string);
throw new AssertionError(string);
}
/**
* Log an error message.
*
* @param s the message
* @param e the exception
*/
public static void logError(String s, Throwable e) {
if (e == null) {
e = new Exception(s);
}
System.out.flush();
System.err.println("ERROR: " + s + " " + e.toString() + " ------------------------------");
e.printStackTrace();
try {
TraceSystem ts = new TraceSystem(null);
FileLock lock = new FileLock(ts, "error.lock", 1000);
lock.lock(FileLock.LOCK_FILE);
FileWriter fw = new FileWriter("error.txt", true);
PrintWriter pw = new PrintWriter(fw);
e.printStackTrace(pw);
pw.close();
fw.close();
lock.unlock();
} catch (Throwable t) {
t.printStackTrace();
}
System.err.flush();
}
/**
* Print a message to system out.
*
* @param s the message
*/
public void println(String s) {
long now = System.currentTimeMillis();
if (now > lastPrint + 1000) {
lastPrint = now;
long time = now - start;
printlnWithTime(time, getClass().getName() + " " + s);
}
}
/**
* Print a message, prepended with the specified time in milliseconds.
*
* @param millis the time in milliseconds
* @param s the message
*/
static void printlnWithTime(long millis, String s) {
SimpleDateFormat dateFormat = new SimpleDateFormat("HH:mm:ss");
s = dateFormat.format(new java.util.Date()) + " " + formatTime(millis) + " " + s;
System.out.println(s);
}
/**
* Print the current time and a message to system out.
*
* @param s the message
*/
protected void printTime(String s) {
SimpleDateFormat dateFormat = new SimpleDateFormat("HH:mm:ss");
println(dateFormat.format(new java.util.Date()) + " " + s);
}
/**
* Format the time in the format hh:mm:ss.1234 where 1234 is milliseconds.
*
* @param millis the time in milliseconds
* @return the formatted time
*/
static String formatTime(long millis) {
String s = new java.sql.Time(java.sql.Time.valueOf("0:0:0").getTime() + millis).toString()
+ "." + ("" + (1000 + (millis % 1000))).substring(1);
if (s.startsWith("00:")) {
s = s.substring(3);
}
return s;
}
/**
* Delete all database files for this database.
*
* @param name the database name
*/
protected void deleteDb(String name) {
deleteDb(getBaseDir(), name);
}
/**
* Delete all database files for a database.
*
* @param dir the directory where the database files are located
* @param name the database name
*/
protected void deleteDb(String dir, String name) {
DeleteDbFiles.execute(dir, name, true);
// ArrayList<String> list;
// list = FileLister.getDatabaseFiles(baseDir, name, true);
// if (list.size() > 0) {
// System.out.println("Not deleted: " + list);
// }
}
/**
* This method will be called by the test framework.
*
* @throws Exception if an exception in the test occurs
*/
public abstract void test() throws Exception;
/**
* Check if two values are equal, and if not throw an exception.
*
* @param message the message to print in case of error
* @param expected the expected value
* @param actual the actual value
* @throws AssertionError if the values are not equal
*/
public void assertEquals(String message, int expected, int actual) {
if (expected != actual) {
fail("Expected: " + expected + " actual: " + actual + " message: " + message);
}
}
/**
* Check if two values are equal, and if not throw an exception.
*
* @param expected the expected value
* @param actual the actual value
* @throws AssertionError if the values are not equal
*/
public void assertEquals(int expected, int actual) {
if (expected != actual) {
fail("Expected: " + expected + " actual: " + actual);
}
}
/**
* Check if two values are equal, and if not throw an exception.
*
* @param expected the expected value
* @param actual the actual value
* @throws AssertionError if the values are not equal
*/
public void assertEquals(byte[] expected, byte[] actual) {
if (expected == null || actual == null) {
assertTrue(expected == actual);
return;
}
assertEquals(expected.length, actual.length);
for (int i = 0; i < expected.length; i++) {
if (expected[i] != actual[i]) {
fail("[" + i + "]: expected: " + (int) expected[i] + " actual: " + (int) actual[i]);
}
}
}
/**
* Check if two values are equal, and if not throw an exception.
*
* @param expected the expected value
* @param actual the actual value
* @throws AssertionError if the values are not equal
*/
public void assertEquals(java.util.Date expected, java.util.Date actual) {
if (expected != actual && !expected.equals(actual)) {
DateFormat df = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss.SSS");
SimpleTimeZone gmt = new SimpleTimeZone(0, "Z");
df.setTimeZone(gmt);
fail("Expected: " + df.format(expected) + " actual: " + df.format(actual));
}
}
/**
* Check if two values are equal, and if not throw an exception.
*
* @param expected the expected value
* @param actual the actual value
* @throws AssertionError if the values are not equal
*/
public void assertEquals(Object[] expected, Object[] actual) {
if (expected == null || actual == null) {
assertTrue(expected == actual);
return;
}
assertEquals(expected.length, actual.length);
for (int i = 0; i < expected.length; i++) {
if (expected[i] == null || actual[i] == null) {
if (expected[i] != actual[i]) {
fail("[" + i + "]: expected: " + expected[i] + " actual: " + actual[i]);
}
} else if (!expected[i].equals(actual[i])) {
fail("[" + i + "]: expected: " + expected[i] + " actual: " + actual[i]);
}
}
}
/**
* Check if two readers are equal, and if not throw an exception.
*
* @param expected the expected value
* @param actual the actual value
* @param len the maximum length, or -1
* @throws AssertionError if the values are not equal
*/
protected void assertEqualReaders(Reader expected, Reader actual, int len) throws IOException {
for (int i = 0; len < 0 || i < len; i++) {
int ce = expected.read();
int ca = actual.read();
assertEquals(ce, ca);
if (ce == -1) {
break;
}
}
expected.close();
actual.close();
}
/**
* Check if two streams are equal, and if not throw an exception.
*
* @param expected the expected value
* @param actual the actual value
* @param len the maximum length, or -1
* @throws AssertionError if the values are not equal
*/
protected void assertEqualStreams(InputStream expected, InputStream actual, int len) throws IOException {
// this doesn't actually read anything - just tests reading 0 bytes
actual.read(new byte[0]);
expected.read(new byte[0]);
actual.read(new byte[10], 3, 0);
expected.read(new byte[10], 0, 0);
for (int i = 0; len < 0 || i < len; i++) {
int ca = actual.read();
actual.read(new byte[0]);
int ce = expected.read();
if (ca != ce) {
assertEquals("Error at index " + i, ce, ca);
}
if (ca == -1) {
break;
}
}
actual.read(new byte[10], 3, 0);
expected.read(new byte[10], 0, 0);
actual.read(new byte[0]);
expected.read(new byte[0]);
actual.close();
expected.close();
}
/**
* Check if two values are equal, and if not throw an exception.
*
* @param message the message to use if the check fails
* @param expected the expected value
* @param actual the actual value
* @throws AssertionError if the values are not equal
*/
protected void assertEquals(String message, String expected, String actual) {
if (expected == null && actual == null) {
return;
} else if (expected == null || actual == null) {
fail("Expected: " + expected + " Actual: " + actual + " " + message);
} else if (!expected.equals(actual)) {
for (int i = 0; i < expected.length(); i++) {
String s = expected.substring(0, i);
if (!actual.startsWith(s)) {
expected = expected.substring(0, i) + "<*>" + expected.substring(i);
break;
}
}
int al = expected.length();
int bl = actual.length();
if (al > 4000) {
expected = expected.substring(0, 4000);
}
if (bl > 4000) {
actual = actual.substring(0, 4000);
}
fail("Expected: " + expected + " (" + al + ") actual: " + actual + " (" + bl + ") " + message);
}
}
/**
* Check if two values are equal, and if not throw an exception.
*
* @param expected the expected value
* @param actual the actual value
* @throws AssertionError if the values are not equal
*/
protected void assertEquals(String expected, String actual) {
assertEquals("", expected, actual);
}
/**
* Check if two result sets are equal, and if not throw an exception.
*
* @param message the message to use if the check fails
* @param rs0 the first result set
* @param rs1 the second result set
* @throws AssertionError if the values are not equal
*/
protected void assertEquals(String message, ResultSet rs0, ResultSet rs1) throws SQLException {
ResultSetMetaData meta = rs0.getMetaData();
int columns = meta.getColumnCount();
assertEquals(columns, rs1.getMetaData().getColumnCount());
while (rs0.next()) {
assertTrue(message, rs1.next());
for (int i = 0; i < columns; i++) {
assertEquals(message, rs0.getString(i + 1), rs1.getString(i + 1));
}
}
assertFalse(message, rs0.next());
assertFalse(message, rs1.next());
}
/**
* Check that the first value is smaller than the second value, and if
* not throw an exception.
*
* @param a the first value
* @param b the second value (must be larger than the first value)
* @throws AssertionError if the first value is larger or equal
*/
protected void assertSmaller(long a, long b) {
if (a >= b) {
fail("a: " + a + " is not smaller than b: " + b);
}
}
/**
* Check that a result contains the given substring.
*
* @param result the result value
* @param contains the term that should appear in the result
* @throws AssertionError if the term was not found
*/
protected void assertContains(String result, String contains) {
if (result.indexOf(contains) < 0) {
fail(result + " does not contain: " + contains);
}
}
/**
* Check that a text starts with the expected characters.
*
* @param text the text
* @param expectedStart the expected prefix
* @throws AssertionError if the text does not start with the expected characters
*/
protected void assertStartsWith(String text, String expectedStart) {
if (!text.startsWith(expectedStart)) {
fail("[" + text + "] does not start with: [" + expectedStart + "]");
}
}
/**
* Check if two values are equal, and if not throw an exception.
*
* @param expected the expected value
* @param actual the actual value
* @throws AssertionError if the values are not equal
*/
protected void assertEquals(long expected, long actual) {
if (expected != actual) {
fail("Expected: " + expected + " actual: " + actual);
}
}
/**
* Check if two values are equal, and if not throw an exception.
*
* @param expected the expected value
* @param actual the actual value
* @throws AssertionError if the values are not equal
*/
protected void assertEquals(double expected, double actual) {
if (expected != actual) {
if (Double.isNaN(expected) && Double.isNaN(actual)) {
// if both are NaN, then there is no error
} else {
fail("Expected: " + expected + " actual: " + actual);
}
}
}
/**
* Check if two values are equal, and if not throw an exception.
*
* @param expected the expected value
* @param actual the actual value
* @throws AssertionError if the values are not equal
*/
protected void assertEquals(float expected, float actual) {
if (expected != actual) {
if (Float.isNaN(expected) && Float.isNaN(actual)) {
// if both are NaN, then there is no error
} else {
fail("Expected: " + expected + " actual: " + actual);
}
}
}
/**
* Check if two values are equal, and if not throw an exception.
*
* @param expected the expected value
* @param actual the actual value
* @throws AssertionError if the values are not equal
*/
protected void assertEquals(boolean expected, boolean actual) {
if (expected != actual) {
fail("Boolean expected: " + expected + " actual: " + actual);
}
}
/**
* Check that the passed boolean is true.
*
* @param condition the condition
* @throws AssertionError if the condition is false
*/
public void assertTrue(boolean condition) {
assertTrue("Expected: true got: false", condition);
}
/**
* Check that the passed object is null.
*
* @param obj the object
* @throws AssertionError if the object is not null
*/
public void assertNull(Object obj) {
if (obj != null) {
fail("Expected: null got: " + obj);
}
}
/**
* Check that the passed boolean is true.
*
* @param message the message to print if the condition is false
* @param condition the condition
* @throws AssertionError if the condition is false
*/
protected void assertTrue(String message, boolean condition) {
if (!condition) {
fail(message);
}
}
/**
* Check that the passed boolean is false.
*
* @param value the condition
* @throws AssertionError if the condition is true
*/
protected void assertFalse(boolean value) {
assertFalse("Expected: false got: true", value);
}
/**
* Check that the passed boolean is false.
*
* @param message the message to print if the condition is true
* @param value the condition
* @throws AssertionError if the condition is true
*/
protected void assertFalse(String message, boolean value) {
if (value) {
fail(message);
}
}
/**
* Check that the result set row count matches.
*
* @param expected the number of expected rows
* @param rs the result set
* @throws AssertionError if a different number of rows have been found
*/
protected void assertResultRowCount(int expected, ResultSet rs) throws SQLException {
int i = 0;
while (rs.next()) {
i++;
}
assertEquals(expected, i);
}
/**
* Check that the result set of a query is exactly this value.
*
* @param stat the statement
* @param sql the SQL statement to execute
* @param expected the expected result value
* @throws AssertionError if a different result value was returned
*/
protected void assertSingleValue(Statement stat, String sql, int expected) throws SQLException {
ResultSet rs = stat.executeQuery(sql);
assertTrue(rs.next());
assertEquals(expected, rs.getInt(1));
assertFalse(rs.next());
}
/**
* Check that the result set of a query is exactly this value.
*
* @param expected the expected result value
* @param stat the statement
* @param sql the SQL statement to execute
* @throws AssertionError if a different result value was returned
*/
protected void assertResult(String expected, Statement stat, String sql) throws SQLException {
ResultSet rs = stat.executeQuery(sql);
if (rs.next()) {
String actual = rs.getString(1);
assertEquals(expected, actual);
} else {
assertEquals(expected, null);
}
}
/**
* Check if the result set meta data is correct.
*
* @param rs the result set
* @param columnCount the expected column count
* @param labels the expected column labels
* @param datatypes the expected data types
* @param precision the expected precisions
* @param scale the expected scales
*/
protected void assertResultSetMeta(ResultSet rs, int columnCount, String[] labels, int[] datatypes, int[] precision,
int[] scale) throws SQLException {
ResultSetMetaData meta = rs.getMetaData();
int cc = meta.getColumnCount();
if (cc != columnCount) {
fail("result set contains " + cc + " columns not " + columnCount);
}
for (int i = 0; i < columnCount; i++) {
if (labels != null) {
String l = meta.getColumnLabel(i + 1);
if (!labels[i].equals(l)) {
fail("column label " + i + " is " + l + " not " + labels[i]);
}
}
if (datatypes != null) {
int t = meta.getColumnType(i + 1);
if (datatypes[i] != t) {
fail("column datatype " + i + " is " + t + " not " + datatypes[i] + " (prec="
+ meta.getPrecision(i + 1) + " scale=" + meta.getScale(i + 1) + ")");
}
String typeName = meta.getColumnTypeName(i + 1);
String className = meta.getColumnClassName(i + 1);
switch (t) {
case Types.INTEGER:
assertEquals("INTEGER", typeName);
assertEquals("java.lang.Integer", className);
break;
case Types.VARCHAR:
assertEquals("VARCHAR", typeName);
assertEquals("java.lang.String", className);
break;
case Types.SMALLINT:
assertEquals("SMALLINT", typeName);
assertEquals("java.lang.Short", className);
break;
case Types.TIMESTAMP:
assertEquals("TIMESTAMP", typeName);
assertEquals("java.sql.Timestamp", className);
break;
case Types.DECIMAL:
assertEquals("DECIMAL", typeName);
assertEquals("java.math.BigDecimal", className);
break;
default:
}
}
if (precision != null) {
int p = meta.getPrecision(i + 1);
if (precision[i] != p) {
fail("column precision " + i + " is " + p + " not " + precision[i]);
}
}
if (scale != null) {
int s = meta.getScale(i + 1);
if (scale[i] != s) {
fail("column scale " + i + " is " + s + " not " + scale[i]);
}
}
}
}
/**
* Check if a result set contains the expected data.
* The sort order is significant.
*
* @param rs the result set
* @param data the expected data
* @throws AssertionError if there is a mismatch
*/
protected void assertResultSetOrdered(ResultSet rs, String[][] data) throws SQLException {
assertResultSet(true, rs, data);
}
/**
* Check if a result set contains the expected data.
*
* @param ordered if the sort order is significant
* @param rs the result set
* @param data the expected data
* @throws AssertionError if there is a mismatch
*/
private void assertResultSet(boolean ordered, ResultSet rs, String[][] data) throws SQLException {
int len = rs.getMetaData().getColumnCount();
int rows = data.length;
if (rows == 0) {
// special case: no rows
if (rs.next()) {
fail("testResultSet expected rowCount:" + rows + " got:0");
}
}
int len2 = data[0].length;
if (len < len2) {
fail("testResultSet expected columnCount:" + len2 + " got:" + len);
}
for (int i = 0; i < rows; i++) {
if (!rs.next()) {
fail("testResultSet expected rowCount:" + rows + " got:" + i);
}
String[] row = getData(rs, len);
if (ordered) {
String[] good = data[i];
if (!testRow(good, row, good.length)) {
fail("testResultSet row not equal, got:\n" + formatRow(row) + "\n" + formatRow(good));
}
} else {
boolean found = false;
for (int j = 0; j < rows; j++) {
String[] good = data[i];
if (testRow(good, row, good.length)) {
found = true;
break;
}
}
if (!found) {
fail("testResultSet no match for row:" + formatRow(row));
}
}
}
if (rs.next()) {
String[] row = getData(rs, len);
fail("testResultSet expected rowcount:" + rows + " got:>=" + (rows + 1) + " data:" + formatRow(row));
}
}
private static boolean testRow(String[] a, String[] b, int len) {
for (int i = 0; i < len; i++) {
String sa = a[i];
String sb = b[i];
if (sa == null || sb == null) {
if (sa != sb) {
return false;
}
} else {
if (!sa.equals(sb)) {
return false;
}
}
}
return true;
}
private static String[] getData(ResultSet rs, int len) throws SQLException {
String[] data = new String[len];
for (int i = 0; i < len; i++) {
data[i] = rs.getString(i + 1);
// just check if it works
rs.getObject(i + 1);
}
return data;
}
private static String formatRow(String[] row) {
String sb = "";
for (String r : row) {
sb += "{" + r + "}";
}
return "{" + sb + "}";
}
/**
* Simulate a database crash. This method will also close the database
* files, but the files are left in a state as if the power had been switched off. It
* doesn't throw an exception.
*
* @param conn the database connection
*/
protected void crash(Connection conn) {
((JdbcConnection) conn).setPowerOffCount(1);
try {
conn.createStatement().execute("SET WRITE_DELAY 0");
conn.createStatement().execute("CREATE TABLE TEST_A(ID INT)");
fail("should be crashed already");
} catch (SQLException e) {
// expected
}
try {
conn.close();
} catch (SQLException e) {
// ignore
}
}
/**
* Read a string from the reader. This method reads until end of file.
*
* @param reader the reader
* @return the string read
*/
protected String readString(Reader reader) {
if (reader == null) {
return null;
}
StringBuilder buffer = new StringBuilder();
try {
while (true) {
int c = reader.read();
if (c == -1) {
break;
}
buffer.append((char) c);
}
return buffer.toString();
} catch (Exception e) {
assertTrue(false);
return null;
}
}
/**
* Check that a given exception is not an unexpected 'general error'
* exception.
*
* @param e the error
*/
public void assertKnownException(SQLException e) {
assertKnownException("", e);
}
/**
* Check that a given exception is not an unexpected 'general error'
* exception.
*
* @param message the message
* @param e the exception
*/
protected void assertKnownException(String message, SQLException e) {
if (e != null && e.getSQLState().startsWith("HY000")) {
TestBase.logError("Unexpected General error " + message, e);
}
}
/**
* Check if two values are equal, and if not throw an exception.
*
* @param expected the expected value
* @param actual the actual value
* @throws AssertionError if the values are not equal
*/
protected void assertEquals(Integer expected, Integer actual) {
if (expected == null || actual == null) {
assertTrue(expected == null && actual == null);
} else {
assertEquals(expected.intValue(), actual.intValue());
}
}
/**
* Check if two databases contain the same meta data.
*
* @param stat1 the connection to the first database
* @param stat2 the connection to the second database
* @throws AssertionError if the databases don't match
*/
protected void assertEqualDatabases(Statement stat1, Statement stat2) throws SQLException {
ResultSet rs = stat1.executeQuery("select value from information_schema.settings where name='ANALYZE_AUTO'");
int analyzeAuto = rs.next() ? rs.getInt(1) : 0;
if (analyzeAuto > 0) {
stat1.execute("analyze");
stat2.execute("analyze");
}
ResultSet rs1 = stat1.executeQuery("SCRIPT simple NOPASSWORDS");
ResultSet rs2 = stat2.executeQuery("SCRIPT simple NOPASSWORDS");
ArrayList<String> list1 = new ArrayList<String>();
ArrayList<String> list2 = new ArrayList<String>();
while (rs1.next()) {
String s1 = rs1.getString(1);
s1 = removeRowCount(s1);
if (!rs2.next()) {
fail("expected: " + s1);
}
String s2 = rs2.getString(1);
s2 = removeRowCount(s2);
if (!s1.equals(s2)) {
list1.add(s1);
list2.add(s2);
}
}
for (String s : list1) {
if (!list2.remove(s)) {
fail("only found in first: " + s + " remaining: " + list2);
}
}
assertEquals("remaining: " + list2, 0, list2.size());
assertFalse(rs2.next());
}
private static String removeRowCount(String scriptLine) {
int index = scriptLine.indexOf("+/-");
if (index >= 0) {
scriptLine = scriptLine.substring(index);
}
return scriptLine;
}
/**
* Create a new object of the calling class.
*
* @return the new test
*/
public static TestBase createCaller() {
org.h2.Driver.load();
try {
return (TestBase) new SecurityManager() {
Class<?> clazz = getClassContext()[2];
}.clazz.newInstance();
} catch (Exception e) {
throw new RuntimeException(e);
}
}
/**
* Get the classpath list used to execute java -cp ...
*
* @return the classpath list
*/
protected String getClassPath() {
return "bin" + File.pathSeparator + "temp" + File.pathSeparator + ".";
}
/**
* Use up almost all memory.
*
* @param remainingKB the number of kilobytes that are not referenced
*/
protected void eatMemory(int remainingKB) {
byte[] reserve = new byte[remainingKB * 1024];
// first, eat memory in 16 KB blocks, then eat in 16 byte blocks
for (int size = 16 * 1024; size > 0; size /= 1024) {
while (true) {
try {
byte[] block = new byte[16 * 1024];
memory.add(block);
} catch (OutOfMemoryError e) {
break;
}
}
}
// silly code - makes sure there are no warnings
reserve[0] = reserve[1];
}
/**
* Remove the hard reference to the memory.
*/
protected void freeMemory() {
memory.clear();
}
/**
* Verify the next method call on the object will throw an exception.
*
* @param <T> the class of the object
* @param expectedExceptionClass the expected exception class to be thrown
* @param obj the object to wrap
* @return a proxy for the object
*/
protected <T> T assertThrows(final Class<?> expectedExceptionClass, final T obj) {
return assertThrows(new ResultVerifier() {
@Override
public boolean verify(Object returnValue, Throwable t, Method m, Object... args) {
if (t == null) {
throw new AssertionError("Expected an exception of type " +
expectedExceptionClass.getSimpleName() +
" to be thrown, but the method returned " +
returnValue +
" for " + ProxyCodeGenerator.formatMethodCall(m, args));
}
if (!expectedExceptionClass.isAssignableFrom(t.getClass())) {
AssertionError ae = new AssertionError(
"Expected an exception of type\n" +
expectedExceptionClass.getSimpleName() +
" to be thrown, but the method under test threw an exception of type\n" +
t.getClass().getSimpleName() +
" (see in the 'Caused by' for the exception that was thrown) " +
" for " + ProxyCodeGenerator.formatMethodCall(m, args));
ae.initCause(t);
throw ae;
}
return false;
}
}, obj);
}
/**
* Verify the next method call on the object will throw an exception.
*
* @param <T> the class of the object
* @param expectedErrorCode the expected error code
* @param obj the object to wrap
* @return a proxy for the object
*/
protected <T> T assertThrows(final int expectedErrorCode, final T obj) {
return assertThrows(new ResultVerifier() {
@Override
public boolean verify(Object returnValue, Throwable t, Method m, Object... args) {
int errorCode;
if (t instanceof DbException) {
errorCode = ((DbException) t).getErrorCode();
} else if (t instanceof SQLException) {
errorCode = ((SQLException) t).getErrorCode();
} else {
errorCode = 0;
}
if (errorCode != expectedErrorCode) {
AssertionError ae = new AssertionError(
"Expected an SQLException or DbException with error code " + expectedErrorCode);
ae.initCause(t);
throw ae;
}
return false;
}
}, obj);
}
/**
* Verify the next method call on the object will throw an exception.
*
* @param <T> the class of the object
* @param verifier the result verifier to call
* @param obj the object to wrap
* @return a proxy for the object
*/
@SuppressWarnings("unchecked")
protected <T> T assertThrows(final ResultVerifier verifier, final T obj) {
Class<?> c = obj.getClass();
InvocationHandler ih = new InvocationHandler() {
private Exception called = new Exception("No method called");
@Override
protected void finalize() {
if (called != null) {
called.printStackTrace(System.err);
}
}
@Override
public Object invoke(Object proxy, Method method, Object[] args) throws Exception {
try {
called = null;
Object ret = method.invoke(obj, args);
verifier.verify(ret, null, method, args);
return ret;
} catch (InvocationTargetException e) {
verifier.verify(null, e.getTargetException(), method, args);
Class<?> retClass = method.getReturnType();
if (!retClass.isPrimitive()) {
return null;
}
if (retClass == boolean.class) {
return false;
} else if (retClass == byte.class) {
return (byte) 0;
} else if (retClass == char.class) {
return (char) 0;
} else if (retClass == short.class) {
return (short) 0;
} else if (retClass == int.class) {
return 0;
} else if (retClass == long.class) {
return 0L;
} else if (retClass == float.class) {
return 0F;
} else if (retClass == double.class) {
return 0D;
}
return null;
}
}
};
if (!ProxyCodeGenerator.isGenerated(c)) {
Class<?>[] interfaces = c.getInterfaces();
if (Modifier.isFinal(c.getModifiers()) || (interfaces.length > 0 && getClass() != c)) {
// interface class proxies
if (interfaces.length == 0) {
throw new RuntimeException("Can not create a proxy for the class " +
c.getSimpleName() +
" because it doesn't implement any interfaces and is final");
}
return (T) Proxy.newProxyInstance(c.getClassLoader(), interfaces, ih);
}
}
try {
Class<?> pc = ProxyCodeGenerator.getClassProxy(c);
Constructor<?> cons = pc.getConstructor(new Class<?>[] { InvocationHandler.class });
return (T) cons.newInstance(new Object[] { ih });
} catch (Exception e) {
throw new RuntimeException(e);
}
}
/**
* Create a proxy class that extends the given class.
*
* @param clazz the class
*/
protected void createClassProxy(Class<?> clazz) {
try {
ProxyCodeGenerator.getClassProxy(clazz);
} catch (Exception e) {
throw new RuntimeException(e);
}
}
}
| true | true | protected String getURL(String name, boolean admin) {
String url;
if (name.startsWith("jdbc:")) {
if (config.mvStore) {
name = addOption(name, "MV_STORE", "true");
}
return name;
}
if (config.memory) {
name = "mem:" + name;
} else {
int idx = name.indexOf(':');
if (idx < 0 || idx > 10) {
// index > 10 if in options
name = getBaseDir() + "/" + name;
}
}
if (config.networked) {
if (config.ssl) {
url = "ssl://localhost:9192/" + name;
} else {
url = "tcp://localhost:9192/" + name;
}
} else if (config.googleAppEngine) {
url = "gae://" + name + ";FILE_LOCK=NO;AUTO_SERVER=FALSE;DB_CLOSE_ON_EXIT=FALSE";
} else {
url = name;
}
if (config.mvStore) {
url = addOption(name, "MV_STORE", "true");
}
if (!config.memory) {
if (config.smallLog && admin) {
url = addOption(url, "MAX_LOG_SIZE", "1");
}
}
if (config.traceSystemOut) {
url = addOption(url, "TRACE_LEVEL_SYSTEM_OUT", "2");
}
if (config.traceLevelFile > 0 && admin) {
url = addOption(url, "TRACE_LEVEL_FILE", "" + config.traceLevelFile);
url = addOption(url, "TRACE_MAX_FILE_SIZE", "8");
}
url = addOption(url, "LOG", "1");
if (config.throttleDefault > 0) {
url = addOption(url, "THROTTLE", "" + config.throttleDefault);
} else if (config.throttle > 0) {
url = addOption(url, "THROTTLE", "" + config.throttle);
}
url = addOption(url, "LOCK_TIMEOUT", "50");
if (config.diskUndo && admin) {
url = addOption(url, "MAX_MEMORY_UNDO", "3");
}
if (config.big && admin) {
// force operations to disk
url = addOption(url, "MAX_OPERATION_MEMORY", "1");
}
if (config.mvcc) {
url = addOption(url, "MVCC", "TRUE");
}
if (config.cacheType != null && admin) {
url = addOption(url, "CACHE_TYPE", config.cacheType);
}
if (config.diskResult && admin) {
url = addOption(url, "MAX_MEMORY_ROWS", "100");
url = addOption(url, "CACHE_SIZE", "0");
}
if (config.cipher != null) {
url = addOption(url, "CIPHER", config.cipher);
}
if (config.defrag) {
url = addOption(url, "DEFRAG_ALWAYS", "TRUE");
}
if (config.nestedJoins) {
url = addOption(url, "NESTED_JOINS", "TRUE");
}
return "jdbc:h2:" + url;
}
| protected String getURL(String name, boolean admin) {
String url;
if (name.startsWith("jdbc:")) {
if (config.mvStore) {
name = addOption(name, "MV_STORE", "true");
}
return name;
}
if (config.memory) {
name = "mem:" + name;
} else {
int idx = name.indexOf(':');
if (idx < 0 || idx > 10) {
// index > 10 if in options
name = getBaseDir() + "/" + name;
}
}
if (config.networked) {
if (config.ssl) {
url = "ssl://localhost:9192/" + name;
} else {
url = "tcp://localhost:9192/" + name;
}
} else if (config.googleAppEngine) {
url = "gae://" + name + ";FILE_LOCK=NO;AUTO_SERVER=FALSE;DB_CLOSE_ON_EXIT=FALSE";
} else {
url = name;
}
if (config.mvStore) {
url = addOption(url, "MV_STORE", "true");
}
if (!config.memory) {
if (config.smallLog && admin) {
url = addOption(url, "MAX_LOG_SIZE", "1");
}
}
if (config.traceSystemOut) {
url = addOption(url, "TRACE_LEVEL_SYSTEM_OUT", "2");
}
if (config.traceLevelFile > 0 && admin) {
url = addOption(url, "TRACE_LEVEL_FILE", "" + config.traceLevelFile);
url = addOption(url, "TRACE_MAX_FILE_SIZE", "8");
}
url = addOption(url, "LOG", "1");
if (config.throttleDefault > 0) {
url = addOption(url, "THROTTLE", "" + config.throttleDefault);
} else if (config.throttle > 0) {
url = addOption(url, "THROTTLE", "" + config.throttle);
}
url = addOption(url, "LOCK_TIMEOUT", "50");
if (config.diskUndo && admin) {
url = addOption(url, "MAX_MEMORY_UNDO", "3");
}
if (config.big && admin) {
// force operations to disk
url = addOption(url, "MAX_OPERATION_MEMORY", "1");
}
if (config.mvcc) {
url = addOption(url, "MVCC", "TRUE");
}
if (config.cacheType != null && admin) {
url = addOption(url, "CACHE_TYPE", config.cacheType);
}
if (config.diskResult && admin) {
url = addOption(url, "MAX_MEMORY_ROWS", "100");
url = addOption(url, "CACHE_SIZE", "0");
}
if (config.cipher != null) {
url = addOption(url, "CIPHER", config.cipher);
}
if (config.defrag) {
url = addOption(url, "DEFRAG_ALWAYS", "TRUE");
}
if (config.nestedJoins) {
url = addOption(url, "NESTED_JOINS", "TRUE");
}
return "jdbc:h2:" + url;
}
|
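The fix in the record above is a wrong-variable bug: the buggy getURL rebuilds the MV_STORE option from the raw name instead of the already-prefixed url, so the tcp://, ssl://, or gae:// prefix computed just before it is silently discarded. The following standalone sketch (not H2 code; addOption here is a hypothetical stand-in for the real helper) reproduces the failure mode:

public class UrlOptionDemo {
    // hypothetical stand-in for the addOption(url, key, value) helper
    static String addOption(String url, String key, String value) {
        return url + ";" + key + "=" + value;
    }
    public static void main(String[] args) {
        String name = "test";
        String url = "tcp://localhost:9192/" + name;
        // buggy: appends to the bare name, losing the tcp:// prefix
        System.out.println(addOption(name, "MV_STORE", "true")); // test;MV_STORE=true
        // fixed: appends to the fully built url
        System.out.println(addOption(url, "MV_STORE", "true"));  // tcp://localhost:9192/test;MV_STORE=true
    }
}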
diff --git a/main/java/org/archive/crawler/hadoop/CollectionIndexItemSearcher.java b/main/java/org/archive/crawler/hadoop/CollectionIndexItemSearcher.java
index d05082a..8cba88c 100644
--- a/main/java/org/archive/crawler/hadoop/CollectionIndexItemSearcher.java
+++ b/main/java/org/archive/crawler/hadoop/CollectionIndexItemSearcher.java
@@ -1,179 +1,180 @@
/**
*
*/
package org.archive.crawler.hadoop;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.Reader;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.Path;
import org.apache.http.HttpEntity;
import org.apache.http.HttpResponse;
import org.apache.http.StatusLine;
import org.apache.http.client.HttpClient;
import org.apache.http.client.methods.HttpGet;
import org.mortbay.util.ajax.JSON;
/**
* Searches items in given collection with private custom web app that indexes individual collection
* for faster look up.
*
* @author kenji
*
*/
public class CollectionIndexItemSearcher implements ItemSearcher {
private static final Log LOG = LogFactory.getLog(CollectionIndexItemSearcher.class);
protected PetaboxFileSystem fs;
protected URI fsUri;
//String serviceUri = "http://crawl400.us.archive.org/crawling/wide/getitems.py/";
String serviceUri = "http://archive.org/~kenji/getitems.php?c=";
protected int maxRetries = 10;
protected int retryDelay = 2000; // milliseconds
/* (non-Javadoc)
* @see org.archive.crawler.hadoop.ItemSearcher#initialize(org.archive.crawler.hadoop.PetaboxFileSystem, java.net.URI, org.apache.hadoop.conf.Configuration)
*/
@Override
public void initialize(PetaboxFileSystem fs, URI fsUri, Configuration conf) {
this.fs = fs;
this.fsUri = fsUri;
if (conf != null) {
serviceUri = conf.get(CollectionIndexItemSearcher.class.getName()+".serviceUri", serviceUri);
}
}
protected URI buildSearchURI(String itemid) throws URISyntaxException {
return URI.create(serviceUri + itemid);
}
/* (non-Javadoc)
* @see org.archive.crawler.hadoop.ItemSearcher#searchItems(java.lang.String)
*/
@Override
public FileStatus[] searchItems(String itemid) throws IOException {
List<FileStatus> result = null;
URI uri;
try {
uri = buildSearchURI(itemid);
LOG.debug("search uri=" + uri);
} catch (URISyntaxException ex) {
throw new IOException("failed to build URI for itemid=" + itemid, ex);
}
HttpClient client = fs.getHttpClient();
HttpGet get = fs.createHttpGet(uri);
HttpEntity entity = null;
int retries = 0;
do {
if (retries > 0) {
if (retries > maxRetries) {
throw new IOException(uri + ": retry exhausted, giving up.");
}
try {
Thread.sleep(retryDelay);
} catch (InterruptedException ex) {
}
}
HttpResponse resp;
try {
resp = client.execute(get);
} catch (IOException ex) {
LOG.warn("connection to " + uri + " failed", ex);
++retries;
continue;
}
StatusLine st = resp.getStatusLine();
entity = resp.getEntity();
switch (st.getStatusCode()) {
case 200:
if (retries > 0) {
LOG.info(uri + ": succeeded after " + retries + " retry(ies)");
}
// it appears search engine often fails to return JSON formatted output despite
// status code 200. detect it here.
Reader reader = new InputStreamReader(entity.getContent(), "UTF-8");
BufferedReader lines = new BufferedReader(reader);
result = new ArrayList<FileStatus>();
String line;
int ln = 0;
try {
while ((line = lines.readLine()) != null) {
ln++;
if (!line.startsWith("{")) {
LOG.warn(uri + ": non-JSON line at " + ln);
continue;
}
@SuppressWarnings("unchecked")
Map<String, Object> jo = (Map<String, Object>)JSON.parse(line);
String iid = (String)jo.get("id");
+ // m is in seconds. be sure to multiply it by 1000 for FileStatus.
Long mtime = (Long)jo.get("m");
if (mtime == null) {
LOG.warn(uri + ": m undefined or null at line " + ln);
mtime = 0L;
}
Path qf = new Path(fsUri.toString(), "/" + iid);
LOG.debug("collection:" + itemid + " qf=" + qf);
- FileStatus fst = new FileStatus(0, true, 2, 4096, mtime, qf);
+ FileStatus fst = new FileStatus(0, true, 2, 4096, mtime * 1000, qf);
result.add(fst);
}
} catch (IOException ex) {
LOG.warn(uri + ": error reading response", ex);
++retries;
continue;
} catch (IllegalStateException ex) {
// JSON.parse throws this for parse error.
LOG.warn(uri + ": JSON.parse failed at line " + ln, ex);
++retries;
continue;
} finally {
lines.close();
}
break;
case 502:
case 503:
case 504:
if (entity != null)
entity.getContent().close();
++retries;
LOG.warn(uri + " failed " + st.getStatusCode() + " "
+ st.getReasonPhrase() + ", retry " + retries);
entity = null;
continue;
default:
entity.getContent().close();
throw new IOException(st.getStatusCode() + " " + st.getReasonPhrase());
}
} while (result == null);
LOG.info(String.format("searchItems(collection=%s): returning %d items", itemid, result.size()));
return result.toArray(new FileStatus[result.size()]);
}
// main method for quick test against production service.
public static void main(String[] args) throws IOException {
Configuration conf = new Configuration();
URI fsUri = URI.create("petabox://archive.org/");
PetaboxFileSystem fs = new PetaboxFileSystem();
fs.initialize(fsUri, conf);
CollectionIndexItemSearcher searcher = new CollectionIndexItemSearcher();
searcher.initialize(fs, fsUri, conf);
FileStatus[] items = searcher.searchItems("wide00005");
for (int i = 0; i < items.length; i++) {
System.out.println(items[i].getPath());
}
}
}
| false | true | public FileStatus[] searchItems(String itemid) throws IOException {
List<FileStatus> result = null;
URI uri;
try {
uri = buildSearchURI(itemid);
LOG.debug("search uri=" + uri);
} catch (URISyntaxException ex) {
throw new IOException("failed to build URI for itemid=" + itemid, ex);
}
HttpClient client = fs.getHttpClient();
HttpGet get = fs.createHttpGet(uri);
HttpEntity entity = null;
int retries = 0;
do {
if (retries > 0) {
if (retries > maxRetries) {
throw new IOException(uri + ": retry exhausted, giving up.");
}
try {
Thread.sleep(retryDelay);
} catch (InterruptedException ex) {
}
}
HttpResponse resp;
try {
resp = client.execute(get);
} catch (IOException ex) {
LOG.warn("connection to " + uri + " failed", ex);
++retries;
continue;
}
StatusLine st = resp.getStatusLine();
entity = resp.getEntity();
switch (st.getStatusCode()) {
case 200:
if (retries > 0) {
LOG.info(uri + ": succeeded after " + retries + " retry(ies)");
}
// it appears search engine often fails to return JSON formatted output despite
// status code 200. detect it here.
Reader reader = new InputStreamReader(entity.getContent(), "UTF-8");
BufferedReader lines = new BufferedReader(reader);
result = new ArrayList<FileStatus>();
String line;
int ln = 0;
try {
while ((line = lines.readLine()) != null) {
ln++;
if (!line.startsWith("{")) {
LOG.warn(uri + ": non-JSON line at " + ln);
continue;
}
@SuppressWarnings("unchecked")
Map<String, Object> jo = (Map<String, Object>)JSON.parse(line);
String iid = (String)jo.get("id");
Long mtime = (Long)jo.get("m");
if (mtime == null) {
LOG.warn(uri + ": m undefined or null at line " + ln);
mtime = 0L;
}
Path qf = new Path(fsUri.toString(), "/" + iid);
LOG.debug("collection:" + itemid + " qf=" + qf);
FileStatus fst = new FileStatus(0, true, 2, 4096, mtime, qf);
result.add(fst);
}
} catch (IOException ex) {
LOG.warn(uri + ": error reading response", ex);
++retries;
continue;
} catch (IllegalStateException ex) {
// JSON.parse throws this for parse error.
LOG.warn(uri + ": JSON.parse failed at line " + ln, ex);
++retries;
continue;
} finally {
lines.close();
}
break;
case 502:
case 503:
case 504:
if (entity != null)
entity.getContent().close();
++retries;
LOG.warn(uri + " failed " + st.getStatusCode() + " "
+ st.getReasonPhrase() + ", retry " + retries);
entity = null;
continue;
default:
entity.getContent().close();
throw new IOException(st.getStatusCode() + " " + st.getReasonPhrase());
}
} while (result == null);
LOG.info(String.format("searchItems(collection=%s): returning %d items", itemid, result.size()));
return result.toArray(new FileStatus[result.size()]);
}
| public FileStatus[] searchItems(String itemid) throws IOException {
List<FileStatus> result = null;
URI uri;
try {
uri = buildSearchURI(itemid);
LOG.debug("search uri=" + uri);
} catch (URISyntaxException ex) {
throw new IOException("failed to build URI for itemid=" + itemid, ex);
}
HttpClient client = fs.getHttpClient();
HttpGet get = fs.createHttpGet(uri);
HttpEntity entity = null;
int retries = 0;
do {
if (retries > 0) {
if (retries > maxRetries) {
throw new IOException(uri + ": retry exhausted, giving up.");
}
try {
Thread.sleep(retryDelay);
} catch (InterruptedException ex) {
}
}
HttpResponse resp;
try {
resp = client.execute(get);
} catch (IOException ex) {
LOG.warn("connection to " + uri + " failed", ex);
++retries;
continue;
}
StatusLine st = resp.getStatusLine();
entity = resp.getEntity();
switch (st.getStatusCode()) {
case 200:
if (retries > 0) {
LOG.info(uri + ": succeeded after " + retries + " retry(ies)");
}
// it appears search engine often fails to return JSON formatted output despite
// status code 200. detect it here.
Reader reader = new InputStreamReader(entity.getContent(), "UTF-8");
BufferedReader lines = new BufferedReader(reader);
result = new ArrayList<FileStatus>();
String line;
int ln = 0;
try {
while ((line = lines.readLine()) != null) {
ln++;
if (!line.startsWith("{")) {
LOG.warn(uri + ": non-JSON line at " + ln);
continue;
}
@SuppressWarnings("unchecked")
Map<String, Object> jo = (Map<String, Object>)JSON.parse(line);
String iid = (String)jo.get("id");
// m is in seconds. be sure to multiply it by 1000 for FileStatus.
Long mtime = (Long)jo.get("m");
if (mtime == null) {
LOG.warn(uri + ": m undefined or null at line " + ln);
mtime = 0L;
}
Path qf = new Path(fsUri.toString(), "/" + iid);
LOG.debug("collection:" + itemid + " qf=" + qf);
FileStatus fst = new FileStatus(0, true, 2, 4096, mtime * 1000, qf);
result.add(fst);
}
} catch (IOException ex) {
LOG.warn(uri + ": error reading response", ex);
++retries;
continue;
} catch (IllegalStateException ex) {
// JSON.parse throws this for parse error.
LOG.warn(uri + ": JSON.parse failed at line " + ln, ex);
++retries;
continue;
} finally {
lines.close();
}
break;
case 502:
case 503:
case 504:
if (entity != null)
entity.getContent().close();
++retries;
LOG.warn(uri + " failed " + st.getStatusCode() + " "
+ st.getReasonPhrase() + ", retry " + retries);
entity = null;
continue;
default:
entity.getContent().close();
throw new IOException(st.getStatusCode() + " " + st.getReasonPhrase());
}
} while (result == null);
LOG.info(String.format("searchItems(collection=%s): returning %d items", itemid, result.size()));
return result.toArray(new FileStatus[result.size()]);
}
|
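The one-line fix above corrects a unit mismatch: the collection index serves the m field in epoch seconds, while Hadoop's FileStatus expects a modification time in milliseconds, so unscaled values put every item in January 1970. Here is a dependency-free sketch of the same conversion using java.time; the concrete timestamp is invented for illustration:

import java.time.Instant;

public class MtimeUnitsDemo {
    public static void main(String[] args) {
        long mtimeSeconds = 1344816000L; // example value of the JSON "m" field
        // seconds misread as milliseconds: lands in mid-January 1970
        System.out.println(Instant.ofEpochMilli(mtimeSeconds));
        // scaled to milliseconds first: the intended date in 2012
        System.out.println(Instant.ofEpochMilli(mtimeSeconds * 1000));
    }
}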
diff --git a/domino/src/edu/rpi/cct/bedework/caldav/DominoSysIntfImpl.java b/domino/src/edu/rpi/cct/bedework/caldav/DominoSysIntfImpl.java
index 9e8c11d..1375bf5 100644
--- a/domino/src/edu/rpi/cct/bedework/caldav/DominoSysIntfImpl.java
+++ b/domino/src/edu/rpi/cct/bedework/caldav/DominoSysIntfImpl.java
@@ -1,807 +1,807 @@
/*
Copyright (c) 2000-2005 University of Washington. All rights reserved.
Redistribution and use of this distribution in source and binary forms,
with or without modification, are permitted provided that:
The above copyright notice and this permission notice appear in
all copies and supporting documentation;
The name, identifiers, and trademarks of the University of Washington
are not used in advertising or publicity without the express prior
written permission of the University of Washington;
Recipients acknowledge that this distribution is made available as a
research courtesy, "as is", potentially with defects, without
any obligation on the part of the University of Washington to
provide support, services, or repair;
THE UNIVERSITY OF WASHINGTON DISCLAIMS ALL WARRANTIES, EXPRESS OR
IMPLIED, WITH REGARD TO THIS SOFTWARE, INCLUDING WITHOUT LIMITATION
ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
PARTICULAR PURPOSE, AND IN NO EVENT SHALL THE UNIVERSITY OF
WASHINGTON BE LIABLE FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL
DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR
PROFITS, WHETHER IN AN ACTION OF CONTRACT, TORT (INCLUDING
NEGLIGENCE) OR STRICT LIABILITY, ARISING OUT OF OR IN CONNECTION WITH
THE USE OR PERFORMANCE OF THIS SOFTWARE.
*/
/* **********************************************************************
Copyright 2005 Rensselaer Polytechnic Institute. All worldwide rights reserved.
Redistribution and use of this distribution in source and binary forms,
with or without modification, are permitted provided that:
The above copyright notice and this permission notice appear in all
copies and supporting documentation;
The name, identifiers, and trademarks of Rensselaer Polytechnic
Institute are not used in advertising or publicity without the
express prior written permission of Rensselaer Polytechnic Institute;
DISCLAIMER: The software is distributed "AS IS" without any express or
implied warranty, including but not limited to, any implied warranties
of merchantability or fitness for a particular purpose or any warranty
of non-infringement of any current or pending patent rights. The authors
of the software make no representations about the suitability of this
software for any particular purpose. The entire risk as to the quality
and performance of the software is with the user. Should the software
prove defective, the user assumes the cost of all necessary servicing,
repair or correction. In particular, neither Rensselaer Polytechnic
Institute, nor the authors of the software are liable for any indirect,
special, consequential, or incidental damages related to the software,
to the maximum extent the law permits.
*/
package edu.rpi.cct.bedework.caldav;
import org.bedework.caldav.server.SysIntf;
import org.bedework.calfacade.BwCalendar;
import org.bedework.calfacade.BwDateTime;
import org.bedework.calfacade.BwEvent;
import org.bedework.calfacade.BwEventProxy;
import org.bedework.calfacade.BwFreeBusy;
import org.bedework.calfacade.BwFreeBusyComponent;
import org.bedework.calfacade.BwUser;
import org.bedework.calfacade.RecurringRetrievalMode;
import org.bedework.calfacade.ScheduleResult;
import org.bedework.calfacade.base.BwShareableDbentity;
import org.bedework.calfacade.svc.EventInfo;
import org.bedework.calfacade.timezones.CalTimezones;
import org.bedework.calfacade.timezones.ResourceTimezones;
import org.bedework.calfacade.util.ChangeTable;
import org.bedework.http.client.dav.DavClient;
import org.bedework.http.client.dav.DavReq;
import org.bedework.http.client.dav.DavResp;
import org.bedework.icalendar.IcalTranslator;
import org.bedework.icalendar.Icalendar;
import org.bedework.icalendar.SAICalCallback;
import edu.rpi.cct.webdav.servlet.common.WebdavUtils;
import edu.rpi.cct.webdav.servlet.shared.PrincipalPropertySearch;
import edu.rpi.cct.webdav.servlet.shared.WebdavException;
import edu.rpi.cct.webdav.servlet.shared.WebdavIntfException;
import edu.rpi.cmt.access.Acl.CurrentAccess;
import edu.rpi.sss.util.xml.XmlUtil;
import net.fortuna.ical4j.model.Calendar;
import net.fortuna.ical4j.model.DateTime;
import net.fortuna.ical4j.model.Period;
import net.fortuna.ical4j.model.TimeZone;
import org.apache.commons.httpclient.NoHttpResponseException;
import org.apache.log4j.Logger;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.Node;
import org.xml.sax.InputSource;
import org.xml.sax.SAXException;
import java.io.InputStreamReader;
import java.io.Reader;
import java.io.Serializable;
import java.io.StringReader;
import java.net.URI;
import java.net.URLDecoder;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.TreeSet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
/** Domino implementation of SysIntf. This interacts with a servlet on Domino
* which presents requested calendar information.
*
* @author Mike Douglass douglm at rpi.edu
*/
public class DominoSysIntfImpl implements SysIntf {
/* There is one entry per host + port. Because we are likely to make a number
* of calls to the same host + port combination it makes sense to preserve
* the objects between calls.
*/
private HashMap<String, DavClient> cioTable = new HashMap<String, DavClient>();
private ResourceTimezones timezones;
// XXX get from properties
private static String defaultTimezone = "America/Los_Angeles";
/* These could come from a db
*/
private static class DominoInfo implements Serializable {
String host;
int port;
String urlPrefix;
boolean secure;
DominoInfo(String host, int port, String urlPrefix, boolean secure) {
this.host = host;
this.port = port;
this.urlPrefix = urlPrefix;
this.secure= secure;
}
/**
* @return String
*/
public String getHost() {
return host;
}
/**
* @return int
*/
public int getPort() {
return port;
}
/**
* @return String
*/
public boolean getSecure() {
return secure;
}
/**
* @return String
*/
public String getUrlPrefix() {
return urlPrefix;
}
}
private static final DominoInfo egenconsultingInfo =
new DominoInfo("t1.egenconsulting.com", 80, "/servlet/Freetime", false);
private static final DominoInfo showcase2Info =
new DominoInfo("showcase2.notes.net", 443, "/servlet/Freetime", true);
private static final HashMap<String, DominoInfo> serversInfo =
new HashMap<String, DominoInfo>();
static {
serversInfo.put("egenconsulting", egenconsultingInfo);
serversInfo.put("showcase2", showcase2Info);
}
private boolean debug;
private transient Logger log;
private IcalTranslator trans;
private String urlPrefix;
public void init(HttpServletRequest req,
String envPrefix,
String account,
boolean debug) throws WebdavIntfException {
try {
this.debug = debug;
trans = new IcalTranslator(new SAICalCallback(getTimezones(), null),
debug);
urlPrefix = WebdavUtils.getUrlPrefix(req);
} catch (Throwable t) {
throw new WebdavIntfException(t);
}
}
public String getUrlPrefix() {
return urlPrefix;
}
public boolean getDirectoryBrowsingDisallowed() throws WebdavIntfException {
return false;
}
public String caladdrToUser(String caladdr) throws WebdavIntfException {
return caladdr;
}
public CalUserInfo getCalUserInfo(String caladdr) throws WebdavIntfException {
return new CalUserInfo(caladdrToUser(caladdr),
null, null, null, null);
}
public Collection<String> getPrincipalCollectionSet(String resourceUri)
throws WebdavIntfException {
throw new WebdavIntfException("unimplemented");
}
public Collection<CalUserInfo> getPrincipals(String resourceUri,
PrincipalPropertySearch pps)
throws WebdavIntfException {
throw new WebdavIntfException("unimplemented");
}
public boolean validUser(String account) throws WebdavIntfException {
throw new WebdavIntfException("unimplemented");
}
public boolean validGroup(String account) throws WebdavIntfException {
throw new WebdavIntfException("unimplemented");
}
/* ====================================================================
* Scheduling
* ==================================================================== */
public ScheduleResult schedule(BwEvent event) throws WebdavIntfException {
throw new WebdavIntfException("unimplemented");
}
public Collection<BwEventProxy> addEvent(BwCalendar cal,
BwEvent event,
Collection<BwEventProxy> overrides,
boolean rollbackOnError) throws WebdavIntfException {
throw new WebdavIntfException("unimplemented");
}
public void updateEvent(BwEvent event,
Collection overrides,
ChangeTable changes) throws WebdavIntfException {
throw new WebdavIntfException("unimplemented");
}
public Collection<EventInfo> getEvents(BwCalendar cal,
BwDateTime startDate, BwDateTime endDate,
RecurringRetrievalMode recurRetrieval)
throws WebdavIntfException {
throw new WebdavIntfException("unimplemented");
}
public EventInfo getEvent(BwCalendar cal, String val,
RecurringRetrievalMode recurRetrieval)
throws WebdavIntfException {
throw new WebdavIntfException("unimplemented");
}
public void deleteEvent(BwEvent ev) throws WebdavIntfException {
throw new WebdavIntfException("unimplemented");
}
public void deleteCalendar(BwCalendar cal) throws WebdavIntfException {
throw new WebdavIntfException("unimplemented");
}
public BwFreeBusy getFreeBusy(BwCalendar cal,
String account,
BwDateTime start,
BwDateTime end) throws WebdavException {
/* Create a url something like:
* http://t1.egenconsulting.com:80/servlet/Freetime/John?start-min=2006-07-11T12:00:00Z&start-max=2006-07-16T12:00:00Z
*/
try {
String serviceName = getServiceName(cal.getPath());
DominoInfo di = serversInfo.get(serviceName);
if (di == null) {
throw WebdavIntfException.badRequest();
}
DavReq req = new DavReq();
req.setMethod("GET");
req.setUrl(di.getUrlPrefix() + "/" +
cal.getOwner().getAccount() + "?" +
"start-min=" + makeDateTime(start) + "&" +
"start-max=" + makeDateTime(end));
req.addHeader("Accept",
"text/xml,application/xml,application/xhtml+xml,text/html;q=0.9,text/plain;q=0.8,image/png,*/*;q=0.5");
req.addHeader("Accept-Language", "en-us,en;q=0.7,es;q=0.3");
req.addHeader("Accept-Encoding", "gzip,deflate");
req.addHeader("Accept-Charset", "ISO-8859-1,utf-8;q=0.7,*;q=0.7");
DavResp resp = send(req, di);
/* He switched to XML! - parse back to a vfreebusy object */
String vfb = makeVfb(new InputStreamReader(resp.getContentStream()));
if (debug) {
debugMsg(vfb);
}
Icalendar ic = trans.fromIcal(null, new StringReader(vfb));
/* Domino returns free time - invert to get busy time
* First we'll order all the periods in the result.
*/
TreeSet<Period> periods = new TreeSet<Period>();
Iterator fbit = ic.iterator();
while (fbit.hasNext()) {
Object o = fbit.next();
if (o instanceof BwFreeBusy) {
BwFreeBusy fb = (BwFreeBusy)o;
Collection<BwFreeBusyComponent> times = fb.getTimes();
if (times != null) {
for (BwFreeBusyComponent fbcomp: times) {
if (fbcomp.getType() != BwFreeBusyComponent.typeFree) {
throw WebdavIntfException.serverError();
}
for (Period p: fbcomp.getPeriods()) {
periods.add(p);
}
}
}
}
}
BwFreeBusy fb = new BwFreeBusy();
- fb.setStart(start);
- fb.setEnd(end);
+ fb.setDtstart(start);
+ fb.setDtend(end);
BwFreeBusyComponent fbcomp = new BwFreeBusyComponent();
fb.addTime(fbcomp);
fbcomp.setType(BwFreeBusyComponent.typeBusy);
/* Fill in the gaps between the free periods with busy time. */
DateTime bstart = (DateTime)start.makeDate();
Iterator pit = periods.iterator();
while (pit.hasNext()) {
Period p = (Period)pit.next();
if (!bstart.equals(p.getStart())) {
/* First free period may be at start of requested time */
Period busyp = new Period(bstart, p.getStart());
fbcomp.addPeriod(busyp);
}
bstart = p.getEnd();
}
/* Fill in to end of requested period */
DateTime bend = (DateTime)end.makeDate();
if (!bstart.equals(bend)) {
Period busyp = new Period(bstart, bend);
fbcomp.addPeriod(busyp);
}
return fb;
} catch (WebdavIntfException wie) {
throw wie;
} catch (Throwable t) {
throw new WebdavIntfException(t);
}
}
public CurrentAccess checkAccess(BwShareableDbentity ent,
int desiredAccess,
boolean returnResult)
throws WebdavException {
throw new WebdavIntfException("unimplemented");
}
public void updateAccess(BwCalendar cal,
Collection aces) throws WebdavIntfException {
throw new WebdavIntfException("unimplemented");
}
public void updateAccess(BwEvent ev,
Collection aces) throws WebdavIntfException{
throw new WebdavIntfException("unimplemented");
}
public void makeCollection(String name, boolean calendarCollection,
String parentPath) throws WebdavIntfException {
throw new WebdavIntfException("unimplemented");
}
public BwCalendar getCalendar(String path) throws WebdavIntfException {
// XXX Just fake it up for the moment.
/* The path should always start with /server-name/user
*/
List l = splitUri(path, true);
String namePart = (String)l.get(l.size() - 1);
BwCalendar cal = new BwCalendar();
cal.setName(namePart);
cal.setPath(path);
String owner = (String)l.get(1);
cal.setOwner(new BwUser(owner));
return cal;
}
public Collection<BwCalendar> getCalendars(BwCalendar cal) throws WebdavIntfException {
throw new WebdavIntfException("unimplemented");
}
public Calendar toCalendar(EventInfo ev) throws WebdavIntfException {
throw new WebdavIntfException("unimplemented");
}
public Icalendar fromIcal(BwCalendar cal, Reader rdr) throws WebdavIntfException {
throw new WebdavIntfException("unimplemented");
}
public CalTimezones getTimezones() throws WebdavIntfException {
try {
if (timezones == null) {
timezones = new ResourceTimezones(debug, null);
timezones.setDefaultTimeZoneId(defaultTimezone);
}
return timezones;
} catch (Throwable t) {
throw new WebdavIntfException(t);
}
}
public TimeZone getDefaultTimeZone() throws WebdavIntfException {
try {
return getTimezones().getDefaultTimeZone();
} catch (Throwable t) {
throw new WebdavIntfException(t);
}
}
public String toStringTzCalendar(String tzid) throws WebdavIntfException {
throw new WebdavIntfException("unimplemented");
}
public int getMaxUserEntitySize() throws WebdavIntfException {
throw new WebdavIntfException("unimplemented");
}
public void close() throws WebdavIntfException {
}
/* ====================================================================
* Private methods
* ==================================================================== */
/* <?xml version="1.0" encoding="UTF-8"?>
<iCalendar>
<vcalendar method="REPLY" version="2.0" prodid="-//IBM Domino Freetime//NONSGML Prototype//EN">
<vfreebusy>
<attendee>John</attendee>
<url>http://t1.egenconsulting.com:80/servlet/Freetime/John</url>
<dtstamp>20060713T185253Z</dtstamp>
<dtstart>20060717T030000Z</dtstart>
<dtend>20060723T030000Z</dtend>
<freebusy fbtype="FREE">20060717T130000Z/20060717T160000Z</freebusy>
<freebusy fbtype="FREE">20060717T170000Z/20060717T210000Z</freebusy>
<freebusy fbtype="FREE">20060718T130000Z/20060718T160000Z</freebusy>
<freebusy fbtype="FREE">20060718T170000Z/20060718T210000Z</freebusy>
<freebusy fbtype="FREE">20060719T130000Z/20060719T160000Z</freebusy>
<freebusy fbtype="FREE">20060719T170000Z/20060719T210000Z</freebusy>
<freebusy fbtype="FREE">20060720T130000Z/20060720T160000Z</freebusy>
<freebusy fbtype="FREE">20060720T170000Z/20060720T210000Z</freebusy>
<freebusy fbtype="FREE">20060721T130000Z/20060721T160000Z</freebusy>
<freebusy fbtype="FREE">20060721T170000Z/20060721T210000Z</freebusy>
</vfreebusy>
</vcalendar>
</iCalendar>
*/
private String makeVfb(Reader rdr) throws WebdavException{
try {
DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
factory.setNamespaceAware(true);
DocumentBuilder builder = factory.newDocumentBuilder();
Document doc = builder.parse(new InputSource(rdr));
StringBuffer sb = new StringBuffer();
sb.append("BEGIN:VCALENDAR\n");
sb.append("VERSION:2.0\n");
sb.append("PRODID:-//Bedework Domino/caldav interface//EN\n");
sb.append("BEGIN:VFREEBUSY\n");
Element root = doc.getDocumentElement(); // </iCalendar>
Element child = getOnlyChild(root); // </vcalendar>
child = getOnlyChild(child); // </vfreebusy>
Element[] children = getChildren(child);
for (int i = 0; i < children.length; i++) {
Element curnode = children[i];
String nm = curnode.getLocalName();
if (nm.equals("attendee")) {
sb.append("ATTENDEE:");
sb.append(getElementContent(curnode));
sb.append("\n");
} else if (nm.equals("url")) {
sb.append("URL:");
sb.append(getElementContent(curnode));
sb.append("\n");
} else if (nm.equals("dtstamp")) {
sb.append("DTSTAMP:");
sb.append(getElementContent(curnode));
sb.append("\n");
} else if (nm.equals("dtstart")) {
sb.append("DTSTART:");
sb.append(getElementContent(curnode));
sb.append("\n");
} else if (nm.equals("dtend")) {
sb.append("DTEND:");
sb.append(getElementContent(curnode));
sb.append("\n");
} else if (nm.equals("freebusy")) {
sb.append("FREEBUSY;FBTYPE=FREE:");
sb.append(getElementContent(curnode));
sb.append("\n");
}
}
sb.append("END:VFREEBUSY\n");
sb.append("END:VCALENDAR\n");
return sb.toString();
} catch (SAXException e) {
throw new WebdavException(HttpServletResponse.SC_BAD_REQUEST);
} catch (Throwable t) {
throw new WebdavException(t);
} finally {
if (rdr != null) {
try {
rdr.close();
} catch (Throwable t) {}
}
}
}
protected Element[] getChildren(Node nd) throws WebdavException {
try {
return XmlUtil.getElementsArray(nd);
} catch (Throwable t) {
if (debug) {
getLogger().error(this, t);
}
throw new WebdavException(t);
}
}
protected Element getOnlyChild(Node nd) throws WebdavException {
try {
return XmlUtil.getOnlyElement(nd);
} catch (Throwable t) {
if (debug) {
getLogger().error(this, t);
}
throw new WebdavException(t);
}
}
protected String getElementContent(Element el) throws WebdavException {
try {
return XmlUtil.getElementContent(el);
} catch (Throwable t) {
if (debug) {
getLogger().error(this, t);
}
throw new WebdavException(t);
}
}
private String makeDateTime(BwDateTime dt) throws WebdavIntfException {
try {
/*
String utcdt = dt.getDate();
StringBuffer sb = new StringBuffer();
// from 20060716T120000Z make 2006-07-16T12:00:00Z
// 0 4 6 1 3
sb.append(utcdt.substring(0, 4));
sb.append("-");
sb.append(utcdt.substring(4, 6));
sb.append("-");
sb.append(utcdt.substring(6, 11));
sb.append(":");
sb.append(utcdt.substring(11, 13));
sb.append(":");
sb.append(utcdt.substring(13));
return sb.toString();
*/
return dt.getDate();
} catch (Throwable t) {
throw new WebdavIntfException(t);
}
}
/*
private net.fortuna.ical4j.model.DateTime makeIcalDateTime(String val)
throws WebdavIntfException {
try {
net.fortuna.ical4j.model.DateTime icaldt =
new net.fortuna.ical4j.model.DateTime(val);
//icaldt.setUtc(true);
return icaldt;
} catch (Throwable t) {
throw new WebdavIntfException(t);
}
}
*/
private List splitUri(String uri, boolean decoded) throws WebdavIntfException {
try {
/*Remove all "." and ".." components */
if (decoded) {
uri = new URI(null, null, uri, null).toString();
}
uri = new URI(uri).normalize().getPath();
if (debug) {
debugMsg("Normalized uri=" + uri);
}
uri = URLDecoder.decode(uri, "UTF-8");
if (!uri.startsWith("/")) {
return null;
}
if (uri.endsWith("/")) {
uri = uri.substring(0, uri.length() - 1);
}
String[] ss = uri.split("/");
int pathLength = ss.length - 1; // First element is empty string
if (pathLength < 2) {
throw WebdavIntfException.badRequest();
}
List l = Arrays.asList(ss);
return l.subList(1, l.size());
} catch (Throwable t) {
if (debug) {
error(t);
}
throw WebdavIntfException.badRequest();
}
}
private String getServiceName(String path) {
int pos = path.indexOf("/", 1);
if (pos < 0) {
return path.substring(1);
}
return path.substring(1, pos);
}
/**
* @param r
* @param di
* @return DavResp
* @throws Throwable
*/
private DavResp send(DavReq r, DominoInfo di) throws Throwable {
DavClient cio = getCio(di.getHost(), di.getPort(), di.getSecure());
int responseCode;
try {
if (r.getAuth()) {
responseCode = cio.sendRequest(r.getMethod(), r.getUrl(),
r.getUser(), r.getPw(),
r.getHeaders(), r.getDepth(),
r.getContentType(),
r.getContentLength(), r.getContentBytes());
} else {
responseCode = cio.sendRequest(r.getMethod(), r.getUrl(),
r.getHeaders(), r.getDepth(),
r.getContentType(), r.getContentLength(),
r.getContentBytes());
}
if (responseCode != HttpServletResponse.SC_OK) {
if (debug) {
debugMsg("Got response " + responseCode +
" for url " + r.getUrl() +
", host " + di.getHost());
}
throw new WebdavIntfException(responseCode);
}
} catch (WebdavIntfException wie) {
throw wie;
} catch (NoHttpResponseException nhre) {
throw new WebdavIntfException(HttpServletResponse.SC_SERVICE_UNAVAILABLE);
} catch (Throwable t) {
throw new WebdavIntfException(t);
}
return cio.getResponse();
}
private DavClient getCio(String host, int port, boolean secure) throws Throwable {
DavClient cio = cioTable.get(host + port + secure);
if (cio == null) {
cio = new DavClient(host, port, 30 * 1000, secure, debug);
cioTable.put(host + port + secure, cio);
}
return cio;
}
/* ====================================================================
* Protected methods
* ==================================================================== */
protected Logger getLogger() {
if (log == null) {
log = Logger.getLogger(this.getClass());
}
return log;
}
protected void trace(String msg) {
getLogger().debug(msg);
}
protected void debugMsg(String msg) {
getLogger().debug(msg);
}
protected void warn(String msg) {
getLogger().warn(msg);
}
protected void error(String msg) {
getLogger().error(msg);
}
protected void error(Throwable t) {
getLogger().error(this, t);
}
protected void logIt(String msg) {
getLogger().info(msg);
}
}
| true | true | public BwFreeBusy getFreeBusy(BwCalendar cal,
String account,
BwDateTime start,
BwDateTime end) throws WebdavException {
/* Create a url something like:
* http://t1.egenconsulting.com:80/servlet/Freetime/John?start-min=2006-07-11T12:00:00Z&start-max=2006-07-16T12:00:00Z
*/
try {
String serviceName = getServiceName(cal.getPath());
DominoInfo di = serversInfo.get(serviceName);
if (di == null) {
throw WebdavIntfException.badRequest();
}
DavReq req = new DavReq();
req.setMethod("GET");
req.setUrl(di.getUrlPrefix() + "/" +
cal.getOwner().getAccount() + "?" +
"start-min=" + makeDateTime(start) + "&" +
"start-max=" + makeDateTime(end));
req.addHeader("Accept",
"text/xml,application/xml,application/xhtml+xml,text/html;q=0.9,text/plain;q=0.8,image/png,*/*;q=0.5");
req.addHeader("Accept-Language", "en-us,en;q=0.7,es;q=0.3");
req.addHeader("Accept-Encoding", "gzip,deflate");
req.addHeader("Accept-Charset", "ISO-8859-1,utf-8;q=0.7,*;q=0.7");
DavResp resp = send(req, di);
/* He switched to XML! - parse back to a vfreebusy object */
String vfb = makeVfb(new InputStreamReader(resp.getContentStream()));
if (debug) {
debugMsg(vfb);
}
Icalendar ic = trans.fromIcal(null, new StringReader(vfb));
/* Domino returns free time - invert to get busy time
* First we'll order all the periods in the result.
*/
TreeSet<Period> periods = new TreeSet<Period>();
Iterator fbit = ic.iterator();
while (fbit.hasNext()) {
Object o = fbit.next();
if (o instanceof BwFreeBusy) {
BwFreeBusy fb = (BwFreeBusy)o;
Collection<BwFreeBusyComponent> times = fb.getTimes();
if (times != null) {
for (BwFreeBusyComponent fbcomp: times) {
if (fbcomp.getType() != BwFreeBusyComponent.typeFree) {
throw WebdavIntfException.serverError();
}
for (Period p: fbcomp.getPeriods()) {
periods.add(p);
}
}
}
}
}
BwFreeBusy fb = new BwFreeBusy();
fb.setStart(start);
fb.setEnd(end);
BwFreeBusyComponent fbcomp = new BwFreeBusyComponent();
fb.addTime(fbcomp);
fbcomp.setType(BwFreeBusyComponent.typeBusy);
/* Fill in the gaps between the free periods with busy time. */
DateTime bstart = (DateTime)start.makeDate();
Iterator pit = periods.iterator();
while (pit.hasNext()) {
Period p = (Period)pit.next();
if (!bstart.equals(p.getStart())) {
/* First free period may be at start of requested time */
Period busyp = new Period(bstart, p.getStart());
fbcomp.addPeriod(busyp);
}
bstart = p.getEnd();
}
/* Fill in to end of requested period */
DateTime bend = (DateTime)end.makeDate();
if (!bstart.equals(bend)) {
Period busyp = new Period(bstart, bend);
fbcomp.addPeriod(busyp);
}
return fb;
} catch (WebdavIntfException wie) {
throw wie;
} catch (Throwable t) {
throw new WebdavIntfException(t);
}
}
| public BwFreeBusy getFreeBusy(BwCalendar cal,
String account,
BwDateTime start,
BwDateTime end) throws WebdavException {
/* Create a url something like:
* http://t1.egenconsulting.com:80/servlet/Freetime/John?start-min=2006-07-11T12:00:00Z&start-max=2006-07-16T12:00:00Z
*/
try {
String serviceName = getServiceName(cal.getPath());
DominoInfo di = serversInfo.get(serviceName);
if (di == null) {
throw WebdavIntfException.badRequest();
}
DavReq req = new DavReq();
req.setMethod("GET");
req.setUrl(di.getUrlPrefix() + "/" +
cal.getOwner().getAccount() + "?" +
"start-min=" + makeDateTime(start) + "&" +
"start-max=" + makeDateTime(end));
req.addHeader("Accept",
"text/xml,application/xml,application/xhtml+xml,text/html;q=0.9,text/plain;q=0.8,image/png,*/*;q=0.5");
req.addHeader("Accept-Language", "en-us,en;q=0.7,es;q=0.3");
req.addHeader("Accept-Encoding", "gzip,deflate");
req.addHeader("Accept-Charset", "ISO-8859-1,utf-8;q=0.7,*;q=0.7");
DavResp resp = send(req, di);
/* He switched to XML! - parse back to a vfreebusy object */
String vfb = makeVfb(new InputStreamReader(resp.getContentStream()));
if (debug) {
debugMsg(vfb);
}
Icalendar ic = trans.fromIcal(null, new StringReader(vfb));
/* Domino returns free time - invert to get busy time
* First we'll order all the periods in the result.
*/
TreeSet<Period> periods = new TreeSet<Period>();
Iterator fbit = ic.iterator();
while (fbit.hasNext()) {
Object o = fbit.next();
if (o instanceof BwFreeBusy) {
BwFreeBusy fb = (BwFreeBusy)o;
Collection<BwFreeBusyComponent> times = fb.getTimes();
if (times != null) {
for (BwFreeBusyComponent fbcomp: times) {
if (fbcomp.getType() != BwFreeBusyComponent.typeFree) {
throw WebdavIntfException.serverError();
}
for (Period p: fbcomp.getPeriods()) {
periods.add(p);
}
}
}
}
}
BwFreeBusy fb = new BwFreeBusy();
fb.setDtstart(start);
fb.setDtend(end);
BwFreeBusyComponent fbcomp = new BwFreeBusyComponent();
fb.addTime(fbcomp);
fbcomp.setType(BwFreeBusyComponent.typeBusy);
/* Fill in the gaps between the free periods with busy time. */
DateTime bstart = (DateTime)start.makeDate();
Iterator pit = periods.iterator();
while (pit.hasNext()) {
Period p = (Period)pit.next();
if (!bstart.equals(p.getStart())) {
/* First free period may be at start of requested time */
Period busyp = new Period(bstart, p.getStart());
fbcomp.addPeriod(busyp);
}
bstart = p.getEnd();
}
/* Fill in to end of requested period */
DateTime bend = (DateTime)end.makeDate();
if (!bstart.equals(bend)) {
Period busyp = new Period(bstart, bend);
fbcomp.addPeriod(busyp);
}
return fb;
} catch (WebdavIntfException wie) {
throw wie;
} catch (Throwable t) {
throw new WebdavIntfException(t);
}
}
|
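The two changed lines above are an accessor rename (setStart/setEnd became setDtstart/setDtend on BwFreeBusy). The surrounding free-to-busy inversion is the interesting part of the method, so here is a dependency-free sketch of that algorithm; Period is a local stand-in, the hour values are invented, and none of this is bedework or ical4j code:

import java.util.ArrayList;
import java.util.List;

public class FreeBusyInvertDemo {
    // stand-in for ical4j's Period; values are hours for readability
    static final class Period {
        final int start, end;
        Period(int start, int end) { this.start = start; this.end = end; }
        public String toString() { return start + "-" + end; }
    }
    public static void main(String[] args) {
        int start = 0, end = 24;
        // free periods, already ordered (the real code collects them in a TreeSet)
        List<Period> free = List.of(new Period(9, 12), new Period(13, 17));
        List<Period> busy = new ArrayList<>();
        int bstart = start;
        for (Period p : free) {
            if (bstart != p.start) {
                busy.add(new Period(bstart, p.start)); // gap before this free slot
            }
            bstart = p.end;
        }
        if (bstart != end) {
            busy.add(new Period(bstart, end)); // fill to the end of the request
        }
        System.out.println(busy); // [0-9, 12-13, 17-24]
    }
}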
diff --git a/services/GORFX/src/de/zib/gndms/GORFX/action/StagedTransferORQCalculator.java b/services/GORFX/src/de/zib/gndms/GORFX/action/StagedTransferORQCalculator.java
index 24e9f4ae..3a8357fd 100644
--- a/services/GORFX/src/de/zib/gndms/GORFX/action/StagedTransferORQCalculator.java
+++ b/services/GORFX/src/de/zib/gndms/GORFX/action/StagedTransferORQCalculator.java
@@ -1,98 +1,98 @@
package de.zib.gndms.GORFX.action;
import de.zib.gndms.c3resource.jaxb.Workspace;
import de.zib.gndms.infra.configlet.C3MDSConfiglet;
import de.zib.gndms.infra.system.GNDMSystem;
import de.zib.gndms.kit.network.NetworkAuxiliariesProvider;
import de.zib.gndms.logic.model.gorfx.AbstractORQCalculator;
import de.zib.gndms.logic.model.gorfx.c3grid.AbstractProviderStageInORQCalculator;
import de.zib.gndms.model.common.types.FutureTime;
import de.zib.gndms.model.common.types.TransientContract;
import de.zib.gndms.model.gorfx.types.GORFXConstantURIs;
import de.zib.gndms.model.gorfx.types.SliceStageInORQ;
import org.apache.axis.types.URI;
import org.globus.wsrf.container.ServiceHost;
import org.joda.time.Duration;
import java.util.Set;
/**
* @author: Maik Jorra <[email protected]>
* @version: $Id$
* <p/>
* User: mjorra, Date: 11.11.2008, Time: 14:57:06
*/
public class StagedTransferORQCalculator extends
AbstractORQCalculator<SliceStageInORQ, StagedTransferORQCalculator> {
// todo find pretty solution for system hack
private static GNDMSystem system;
public StagedTransferORQCalculator( ) {
super( SliceStageInORQ.class );
}
//
// offertime = stageing time + transfer-time
@Override
public TransientContract createOffer() throws Exception {
// create provider staging orq using this this offer type
- AbstractProviderStageInORQCalculator psi_calc = ( AbstractProviderStageInORQCalculator )
- getSystem().getInstanceDir().newORQCalculator( getSystem().getEntityManagerFactory(), GORFXConstantURIs.PROVIDER_STAGE_IN_URI );
+ AbstractProviderStageInORQCalculator psi_calc = AbstractProviderStageInORQCalculator.class.cast(
+ getSystem().getInstanceDir().newORQCalculator( getSystem().getEntityManagerFactory(), GORFXConstantURIs.PROVIDER_STAGE_IN_URI ));
psi_calc.setKey( getKey() );
psi_calc.setORQArguments( getORQArguments() );
TransientContract c = psi_calc.createOffer();
if( c.hasExpectedSize() ) {
long s = c.getExpectedSize( );
String src = ServiceHost.getBaseURL( ).getHost();
URI dst_uri = destinationURI( getORQArguments().getGridSite() );
String dst = dst_uri.getHost( );
Float ebw = getNetAux().getBandWidthEstimater().estimateBandWidthFromTo( src, dst );
if( ebw == null )
throw new RuntimeException( "No connection between " + src + " and " + dst );
getORQArguments().setActGridSiteURI( dst );
long ms = NetworkAuxiliariesProvider.calculateTransferTime( s, ebw );
c.setDeadline( FutureTime.atOffset( new Duration( ms ) ) );
}
return c;
}
protected static GNDMSystem getSystem( ) {
if( system == null )
throw new IllegalStateException ( "GNDMS not present" );
return system;
}
public static void setSystem( GNDMSystem sys ) {
if( system != null )
throw new IllegalStateException ( "GNDMS already present" );
system = sys;
}
public URI destinationURI( String gs ) throws URI.MalformedURIException {
C3MDSConfiglet cfg = getConfigletProvider().getConfiglet( C3MDSConfiglet.class, C3MDSConfiglet.class.getName( ) );
Set<Workspace.Archive> a = cfg.getCatalog().getArchivesByOid().get( gs );
Workspace w = cfg.getCatalog().getWorkspaceByArchive().get( (Workspace.Archive) a.toArray()[0] );
return new URI( w.getBaseUrl() );
}
}
| true | true | public TransientContract createOffer() throws Exception {
// create provider staging orq using this this offer type
AbstractProviderStageInORQCalculator psi_calc = ( AbstractProviderStageInORQCalculator )
getSystem().getInstanceDir().newORQCalculator( getSystem().getEntityManagerFactory(), GORFXConstantURIs.PROVIDER_STAGE_IN_URI );
psi_calc.setKey( getKey() );
psi_calc.setORQArguments( getORQArguments() );
TransientContract c = psi_calc.createOffer();
if( c.hasExpectedSize() ) {
long s = c.getExpectedSize( );
String src = ServiceHost.getBaseURL( ).getHost();
URI dst_uri = destinationURI( getORQArguments().getGridSite() );
String dst = dst_uri.getHost( );
Float ebw = getNetAux().getBandWidthEstimater().estimateBandWidthFromTo( src, dst );
if( ebw == null )
throw new RuntimeException( "No connection between " + src + " and " + dst );
getORQArguments().setActGridSiteURI( dst );
long ms = NetworkAuxiliariesProvider.calculateTransferTime( s, ebw );
c.setDeadline( FutureTime.atOffset( new Duration( ms ) ) );
}
return c;
}
| public TransientContract createOffer() throws Exception {
// create provider staging orq using this this offer type
AbstractProviderStageInORQCalculator psi_calc = AbstractProviderStageInORQCalculator.class.cast(
getSystem().getInstanceDir().newORQCalculator( getSystem().getEntityManagerFactory(), GORFXConstantURIs.PROVIDER_STAGE_IN_URI ));
psi_calc.setKey( getKey() );
psi_calc.setORQArguments( getORQArguments() );
TransientContract c = psi_calc.createOffer();
if( c.hasExpectedSize() ) {
long s = c.getExpectedSize( );
String src = ServiceHost.getBaseURL( ).getHost();
URI dst_uri = destinationURI( getORQArguments().getGridSite() );
String dst = dst_uri.getHost( );
Float ebw = getNetAux().getBandWidthEstimater().estimateBandWidthFromTo( src, dst );
if( ebw == null )
throw new RuntimeException( "No connection beween" + src + " and " + dst );
getORQArguments().setActGridSiteURI( dst );
long ms = NetworkAuxiliariesProvider.calculateTransferTime( s, ebw );
c.setDeadline( FutureTime.atOffset( new Duration( ms ) ) );
}
return c;
}
|
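The fix above replaces a plain cast expression with AbstractProviderStageInORQCalculator.class.cast(...). Both perform the same runtime check and throw ClassCastException on mismatch; Class.cast pays off when the target type is only known as a Class<T> token, because it casts without an unchecked warning. A minimal sketch of the difference:

public class ClassCastDemo {
    // casts via a Class token; no unchecked-cast warning inside
    static <T> T as(Class<T> type, Object o) {
        return type.cast(o);
    }
    public static void main(String[] args) {
        Object o = "hello";
        String a = (String) o;            // cast operator
        String b = String.class.cast(o);  // same runtime behavior
        String c = as(String.class, o);   // generic helper
        System.out.println(a + " " + b + " " + c);
    }
}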
diff --git a/herald-sso/herald-sso-api/src/main/java/cn/edu/seu/herald/sso/domain/ConcreteSsoContext.java b/herald-sso/herald-sso-api/src/main/java/cn/edu/seu/herald/sso/domain/ConcreteSsoContext.java
index beeb0da..9b5bd10 100644
--- a/herald-sso/herald-sso-api/src/main/java/cn/edu/seu/herald/sso/domain/ConcreteSsoContext.java
+++ b/herald-sso/herald-sso-api/src/main/java/cn/edu/seu/herald/sso/domain/ConcreteSsoContext.java
@@ -1,75 +1,75 @@
/*
* Copyright 2012 Herald, Southeast University.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package cn.edu.seu.herald.sso.domain;
import cn.edu.seu.herald.session.Session;
import cn.edu.seu.herald.sso.SsoServiceConstants;
import java.util.Enumeration;
import java.util.Stack;
/**
*
* @author rAy
*/
public class ConcreteSsoContext implements SingleSignOnContext {
private Session session;
public ConcreteSsoContext(Session session) {
this.session = session;
}
@Override
public StudentUser getLogOnStudentUser() {
int cardNumber = (Integer) session.getAttribute(
SsoServiceConstants.CARD_NUMBER_NODE_NAME);
String studentId = (String) session.getAttribute(
SsoServiceConstants.STUDENT_ID_NODE_NAME);
String fullName = (String) session.getAttribute(
SsoServiceConstants.STUDENT_FULL_NAME_NODE_NAME);
StudentUser sUser = new StudentUser();
sUser.setCardNumber(cardNumber);
sUser.setStudentId(studentId);
sUser.setFullName(fullName);
return sUser;
}
@Override
public Object getAttribute(String name) {
String sessionAttributeName =
SsoServiceConstants.SSO_CONTEXT_PROPERTIES_PREFIX + name;
return session.getAttribute(sessionAttributeName);
}
@Override
public Enumeration<String> getAttributeNames() {
Stack<String> stack = new Stack<String>();
Enumeration<String> sessioEnum = session.getAttributeNames();
while (sessioEnum.hasMoreElements()) {
String name = sessioEnum.nextElement();
boolean isSsoPropertyNode = (name != null) && name.startsWith(
SsoServiceConstants.SSO_CONTEXT_PROPERTIES_PREFIX);
if (isSsoPropertyNode) {
String ssoPropertyName = name.substring(
SsoServiceConstants.SSO_CONTEXT_PROPERTIES_PREFIX
.length());
- stack.add(name);
+ stack.add(ssoPropertyName);
}
}
return stack.elements();
}
}
| true | true | public Enumeration<String> getAttributeNames() {
Stack<String> stack = new Stack<String>();
Enumeration<String> sessioEnum = session.getAttributeNames();
while (sessioEnum.hasMoreElements()) {
String name = sessioEnum.nextElement();
boolean isSsoPropertyNode = (name != null) && name.startsWith(
SsoServiceConstants.SSO_CONTEXT_PROPERTIES_PREFIX);
if (isSsoPropertyNode) {
String ssoPropertyName = name.substring(
SsoServiceConstants.SSO_CONTEXT_PROPERTIES_PREFIX
.length());
stack.add(name);
}
}
return stack.elements();
}
| public Enumeration<String> getAttributeNames() {
Stack<String> stack = new Stack<String>();
Enumeration<String> sessioEnum = session.getAttributeNames();
while (sessioEnum.hasMoreElements()) {
String name = sessioEnum.nextElement();
boolean isSsoPropertyNode = (name != null) && name.startsWith(
SsoServiceConstants.SSO_CONTEXT_PROPERTIES_PREFIX);
if (isSsoPropertyNode) {
String ssoPropertyName = name.substring(
SsoServiceConstants.SSO_CONTEXT_PROPERTIES_PREFIX
.length());
stack.add(ssoPropertyName);
}
}
return stack.elements();
}
|
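The bug above computes the prefix-stripped ssoPropertyName and then pushes the unstripped name anyway, so callers receive raw session keys instead of SSO attribute names. A minimal sketch of the intended filter-and-strip loop; the prefix constant is a made-up stand-in for SsoServiceConstants.SSO_CONTEXT_PROPERTIES_PREFIX:

import java.util.ArrayList;
import java.util.List;

public class PrefixStripDemo {
    static final String PREFIX = "sso.ctx."; // hypothetical prefix value
    public static void main(String[] args) {
        List<String> sessionKeys = List.of("sso.ctx.role", "session.id", "sso.ctx.dept");
        List<String> attributeNames = new ArrayList<>();
        for (String name : sessionKeys) {
            if (name.startsWith(PREFIX)) {
                String stripped = name.substring(PREFIX.length());
                attributeNames.add(stripped); // the buggy version added name here
            }
        }
        System.out.println(attributeNames); // [role, dept]
    }
}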
diff --git a/src/edu/pitt/isp/sverchkov/math/DoubleIterable.java b/src/edu/pitt/isp/sverchkov/math/DoubleIterable.java
index 80a65e4..a78cc4c 100644
--- a/src/edu/pitt/isp/sverchkov/math/DoubleIterable.java
+++ b/src/edu/pitt/isp/sverchkov/math/DoubleIterable.java
@@ -1,58 +1,58 @@
/*
* To change this template, choose Tools | Templates
* and open the template in the editor.
*/
package edu.pitt.isp.sverchkov.math;
import java.lang.reflect.InvocationTargetException;
import java.util.Iterator;
import java.util.logging.Level;
import java.util.logging.Logger;
/**
* A class to wrap an iterable the elements of which implement doubleValue
* @author YUS24
*/
public class DoubleIterable implements Iterable<Double> {
private final Iterable iterable;
public DoubleIterable( Iterable iterable ){
this.iterable = iterable;
}
@Override
public Iterator<Double> iterator() {
return iterable.iterator();
}
private static class DoubleIterator<T> implements Iterator<Double> {
private final Iterator<T> iterator;
private DoubleIterator( Iterator<T> iterator ){
this.iterator = iterator;
}
@Override
public boolean hasNext() {
return iterator.hasNext();
}
@Override
public Double next() {
T next = iterator.next();
try {
- return (Double) next.getClass().getMethod( "doubleValue" ).invoke(next );
+ return (Double) ( next.getClass().getMethod( "doubleValue" ).invoke( next ) );
} catch ( IllegalAccessException | IllegalArgumentException | InvocationTargetException | NoSuchMethodException | SecurityException ex) {
throw new IllegalArgumentException(ex);
}
}
@Override
public void remove() {
iterator.remove();
}
}
}
| true | true | public Double next() {
T next = iterator.next();
try {
return (Double) next.getClass().getMethod( "doubleValue" ).invoke(next );
} catch ( IllegalAccessException | IllegalArgumentException | InvocationTargetException | NoSuchMethodException | SecurityException ex) {
throw new IllegalArgumentException(ex);
}
}
| public Double next() {
T next = iterator.next();
try {
return (Double) ( next.getClass().getMethod( "doubleValue" ).invoke( next ) );
} catch ( IllegalAccessException | IllegalArgumentException | InvocationTargetException | NoSuchMethodException | SecurityException ex) {
throw new IllegalArgumentException(ex);
}
}
|
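The change above is purely cosmetic: a cast binds more loosely than a method-call chain, so the Double cast already applied to the whole invoke(...) result, and the added parentheses only aid readability. If the element types are Numbers, the reflective lookup can be avoided entirely; a sketch of that alternative (an illustration, not the library's code):

public class DoubleValueDemo {
    static Double toDouble(Object next) {
        if (next instanceof Number) {
            return ((Number) next).doubleValue(); // no reflection needed
        }
        throw new IllegalArgumentException("no doubleValue(): " + next.getClass());
    }
    public static void main(String[] args) {
        System.out.println(toDouble(42));   // 42.0
        System.out.println(toDouble(3.5f)); // 3.5
    }
}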
diff --git a/src/main/java/org/thymeleaf/processor/attr/AbstractMarkupRemovalAttrProcessor.java b/src/main/java/org/thymeleaf/processor/attr/AbstractMarkupRemovalAttrProcessor.java
index 0ecf9b22..3d49bf93 100755
--- a/src/main/java/org/thymeleaf/processor/attr/AbstractMarkupRemovalAttrProcessor.java
+++ b/src/main/java/org/thymeleaf/processor/attr/AbstractMarkupRemovalAttrProcessor.java
@@ -1,116 +1,117 @@
/*
* =============================================================================
*
* Copyright (c) 2011-2013, The THYMELEAF team (http://www.thymeleaf.org)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* =============================================================================
*/
package org.thymeleaf.processor.attr;
import java.util.ArrayList;
import java.util.List;
import org.thymeleaf.Arguments;
import org.thymeleaf.dom.Element;
import org.thymeleaf.dom.Node;
import org.thymeleaf.processor.IAttributeNameProcessorMatcher;
import org.thymeleaf.processor.ProcessorResult;
/**
*
* @author Daniel Fernández
*
* @since 2.1.0
*
*/
public abstract class AbstractMarkupRemovalAttrProcessor
extends AbstractAttrProcessor {
protected static enum RemovalType { ALL, ALLBUTFIRST, ELEMENT, BODY, NONE }
protected AbstractMarkupRemovalAttrProcessor(final IAttributeNameProcessorMatcher matcher) {
super(matcher);
}
protected AbstractMarkupRemovalAttrProcessor(final String attributeName) {
super(attributeName);
}
@Override
public final ProcessorResult processAttribute(final Arguments arguments, final Element element, final String attributeName) {
final RemovalType removalType = getRemovalType(arguments, element, attributeName);
if (removalType == null) {
return ProcessorResult.OK;
}
switch (removalType) {
case NONE:
+ element.removeAttribute(attributeName);
return ProcessorResult.OK;
case ALL:
element.getParent().removeChild(element);
return ProcessorResult.OK;
case ALLBUTFIRST:
final List<Node> children = element.getChildren();
final List<Node> newChildren = new ArrayList<Node>(children.size());
boolean childElementFound = false;
for (final Node child : children) {
if (child instanceof Element) {
if (!childElementFound) {
newChildren.add(child);
childElementFound = true;
}
} else {
newChildren.add(child);
}
}
element.setChildren(newChildren);
element.removeAttribute(attributeName);
return ProcessorResult.OK;
case ELEMENT:
element.getParent().extractChild(element);
return ProcessorResult.OK;
case BODY:
element.clearChildren();
element.removeAttribute(attributeName);
return ProcessorResult.OK;
}
return ProcessorResult.OK;
}
protected abstract RemovalType getRemovalType(final Arguments arguments, final Element element, final String attributeName);
}
| true | true | public final ProcessorResult processAttribute(final Arguments arguments, final Element element, final String attributeName) {
final RemovalType removalType = getRemovalType(arguments, element, attributeName);
if (removalType == null) {
return ProcessorResult.OK;
}
switch (removalType) {
case NONE:
return ProcessorResult.OK;
case ALL:
element.getParent().removeChild(element);
return ProcessorResult.OK;
case ALLBUTFIRST:
final List<Node> children = element.getChildren();
final List<Node> newChildren = new ArrayList<Node>(children.size());
boolean childElementFound = false;
for (final Node child : children) {
if (child instanceof Element) {
if (!childElementFound) {
newChildren.add(child);
childElementFound = true;
}
} else {
newChildren.add(child);
}
}
element.setChildren(newChildren);
element.removeAttribute(attributeName);
return ProcessorResult.OK;
case ELEMENT:
element.getParent().extractChild(element);
return ProcessorResult.OK;
case BODY:
element.clearChildren();
element.removeAttribute(attributeName);
return ProcessorResult.OK;
}
return ProcessorResult.OK;
}
| public final ProcessorResult processAttribute(final Arguments arguments, final Element element, final String attributeName) {
final RemovalType removalType = getRemovalType(arguments, element, attributeName);
if (removalType == null) {
return ProcessorResult.OK;
}
switch (removalType) {
case NONE:
element.removeAttribute(attributeName);
return ProcessorResult.OK;
case ALL:
element.getParent().removeChild(element);
return ProcessorResult.OK;
case ALLBUTFIRST:
final List<Node> children = element.getChildren();
final List<Node> newChildren = new ArrayList<Node>(children.size());
boolean childElementFound = false;
for (final Node child : children) {
if (child instanceof Element) {
if (!childElementFound) {
newChildren.add(child);
childElementFound = true;
}
} else {
newChildren.add(child);
}
}
element.setChildren(newChildren);
element.removeAttribute(attributeName);
return ProcessorResult.OK;
case ELEMENT:
element.getParent().extractChild(element);
return ProcessorResult.OK;
case BODY:
element.clearChildren();
element.removeAttribute(attributeName);
return ProcessorResult.OK;
}
return ProcessorResult.OK;
}
|
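// --- Illustrative sketch (separate from the record above) ------------------
// A minimal, standalone model of the fix in the Thymeleaf pair above: when the
// removal type is NONE the processor must still consume its own trigger
// attribute (th:remove in real templates), otherwise the attribute survives
// into the rendered markup. Plain Java maps stand in for Thymeleaf's DOM; the
// attribute name is used only as an illustrative key.
import java.util.LinkedHashMap;
import java.util.Map;

class RemovalNoneSketch {

    // Returns the attributes after processing; 'buggy' reproduces the old
    // behavior of returning early without removing the trigger attribute.
    static Map<String, String> process(Map<String, String> attrs, boolean buggy) {
        Map<String, String> out = new LinkedHashMap<String, String>(attrs);
        if ("none".equals(out.get("th:remove")) && !buggy) {
            out.remove("th:remove"); // the fix: consume the attribute
        }
        return out;
    }

    public static void main(String[] args) {
        Map<String, String> attrs = new LinkedHashMap<String, String>();
        attrs.put("class", "item");
        attrs.put("th:remove", "none");
        System.out.println("buggy: " + process(attrs, true));  // th:remove leaks through
        System.out.println("fixed: " + process(attrs, false)); // th:remove is consumed
    }
}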
diff --git a/xbean-reflect/src/main/java/org/apache/xbean/recipe/MapRecipe.java b/xbean-reflect/src/main/java/org/apache/xbean/recipe/MapRecipe.java
index 88b09409..9c88c466 100644
--- a/xbean-reflect/src/main/java/org/apache/xbean/recipe/MapRecipe.java
+++ b/xbean-reflect/src/main/java/org/apache/xbean/recipe/MapRecipe.java
@@ -1,310 +1,310 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.xbean.recipe;
import java.lang.reflect.Type;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.EnumSet;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.SortedMap;
import java.util.TreeMap;
import java.util.Dictionary;
import java.util.AbstractMap;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
/**
* @version $Rev: 6687 $ $Date: 2005-12-28T21:08:56.733437Z $
*/
public class MapRecipe extends AbstractRecipe {
private final List<Object[]> entries;
private String typeName;
private Class typeClass;
private final EnumSet<Option> options = EnumSet.noneOf(Option.class);
public MapRecipe() {
entries = new ArrayList<Object[]>();
}
public MapRecipe(String type) {
this.typeName = type;
entries = new ArrayList<Object[]>();
}
public MapRecipe(Class type) {
if (type == null) throw new NullPointerException("type is null");
this.typeClass = type;
entries = new ArrayList<Object[]>();
}
public MapRecipe(Map<?,?> map) {
if (map == null) throw new NullPointerException("map is null");
entries = new ArrayList<Object[]>(map.size());
        // If the specified map has a default constructor we will recreate the map, otherwise we fall back to a TreeMap, ConcurrentHashMap or LinkedHashMap
if (RecipeHelper.hasDefaultConstructor(map.getClass())) {
this.typeClass = map.getClass();
} else if (map instanceof SortedMap) {
this.typeClass = TreeMap.class;
} else if (map instanceof ConcurrentMap) {
this.typeClass = ConcurrentHashMap.class;
} else {
this.typeClass = LinkedHashMap.class;
}
putAll(map);
}
public MapRecipe(MapRecipe mapRecipe) {
if (mapRecipe == null) throw new NullPointerException("mapRecipe is null");
this.typeName = mapRecipe.typeName;
this.typeClass = mapRecipe.typeClass;
entries = new ArrayList<Object[]>(mapRecipe.entries);
}
public void allow(Option option){
options.add(option);
}
public void disallow(Option option){
options.remove(option);
}
public List<Recipe> getNestedRecipes() {
List<Recipe> nestedRecipes = new ArrayList<Recipe>(entries.size() * 2);
for (Object[] entry : entries) {
Object key = entry[0];
if (key instanceof Recipe) {
Recipe recipe = (Recipe) key;
nestedRecipes.add(recipe);
}
Object value = entry[1];
if (value instanceof Recipe) {
Recipe recipe = (Recipe) value;
nestedRecipes.add(recipe);
}
}
return nestedRecipes;
}
public List<Recipe> getConstructorRecipes() {
if (!options.contains(Option.LAZY_ASSIGNMENT)) {
return getNestedRecipes();
}
return Collections.emptyList();
}
public boolean canCreate(Type type) {
Class myType = getType(type);
return RecipeHelper.isAssignable(type, myType);
}
protected Object internalCreate(Type expectedType, boolean lazyRefAllowed) throws ConstructionException {
Class mapType = getType(expectedType);
if (!RecipeHelper.hasDefaultConstructor(mapType)) {
throw new ConstructionException("Type does not have a default constructor " + mapType.getName());
}
Object o;
try {
o = mapType.newInstance();
} catch (Exception e) {
throw new ConstructionException("Error while creating set instance: " + mapType.getName());
}
Map instance;
if (o instanceof Map) {
instance = (Map) o;
} else if (o instanceof Dictionary) {
instance = new DummyDictionaryAsMap((Dictionary) o);
} else {
throw new ConstructionException("Specified map type does not implement the Map interface: " + mapType.getName());
}
// get component type
Type keyType = Object.class;
Type valueType = Object.class;
- Type[] typeParameters = RecipeHelper.getTypeParameters(Collection.class, expectedType);
+ Type[] typeParameters = RecipeHelper.getTypeParameters(Map.class, expectedType);
if (typeParameters != null && typeParameters.length == 2) {
if (typeParameters[0] instanceof Class) {
keyType = typeParameters[0];
}
if (typeParameters[1] instanceof Class) {
valueType = typeParameters[1];
}
}
// add to execution context if name is specified
if (getName() != null) {
ExecutionContext.getContext().addObject(getName(), instance);
}
// add map entries
boolean refAllowed = options.contains(Option.LAZY_ASSIGNMENT);
for (Object[] entry : entries) {
Object key = RecipeHelper.convert(keyType, entry[0], refAllowed);
Object value = RecipeHelper.convert(valueType, entry[1], refAllowed);
if (key instanceof Reference) {
// when the key reference and optional value reference are both resolved
// the key/value pair will be added to the map
Reference.Action action = new UpdateMap(instance, key, value);
((Reference) key).setAction(action);
if (value instanceof Reference) {
((Reference) value).setAction(action);
}
} else if (value instanceof Reference) {
// add a null place holder assigned to the key
//noinspection unchecked
instance.put(key, null);
                // when the value is resolved we will replace the null value with the real value
Reference.Action action = new UpdateValue(instance, key);
((Reference) value).setAction(action);
} else {
//noinspection unchecked
instance.put(key, value);
}
}
return instance;
}
private Class getType(Type expectedType) {
Class expectedClass = RecipeHelper.toClass(expectedType);
if (typeClass != null || typeName != null) {
Class type = typeClass;
if (type == null) {
try {
type = RecipeHelper.loadClass(typeName);
} catch (ClassNotFoundException e) {
throw new ConstructionException("Type class could not be found: " + typeName);
}
}
// if expectedType is a subclass of the assigned type,
// we use it assuming it has a default constructor
if (type.isAssignableFrom(expectedClass)) {
return getMap(expectedClass);
} else {
return getMap(type);
}
}
// no type explicitly set
return getMap(expectedClass);
}
private Class getMap(Class type) {
if (RecipeHelper.hasDefaultConstructor(type)) {
return type;
} else if (SortedMap.class.isAssignableFrom(type)) {
return TreeMap.class;
} else if (ConcurrentMap.class.isAssignableFrom(type)) {
return ConcurrentHashMap.class;
} else {
return LinkedHashMap.class;
}
}
public void put(Object key, Object value) {
if (key == null) throw new NullPointerException("key is null");
entries.add(new Object[] { key, value});
}
public void putAll(Map<?,?> map) {
if (map == null) throw new NullPointerException("map is null");
for (Map.Entry<?,?> entry : map.entrySet()) {
Object key = entry.getKey();
Object value = entry.getValue();
put(key, value);
}
}
private static class UpdateValue implements Reference.Action {
private final Map map;
private final Object key;
public UpdateValue(Map map, Object key) {
this.map = map;
this.key = key;
}
@SuppressWarnings({"unchecked"})
public void onSet(Reference ref) {
map.put(key, ref.get());
}
}
private static class UpdateMap implements Reference.Action {
private final Map map;
private final Object key;
private final Object value;
public UpdateMap(Map map, Object key, Object value) {
this.map = map;
this.key = key;
this.value = value;
}
@SuppressWarnings({"unchecked"})
public void onSet(Reference ignored) {
Object key = this.key;
if (key instanceof Reference) {
Reference reference = (Reference) key;
if (!reference.isResolved()) {
return;
}
key = reference.get();
}
Object value = this.value;
if (value instanceof Reference) {
Reference reference = (Reference) value;
if (!reference.isResolved()) {
return;
}
value = reference.get();
}
map.put(key, value);
}
}
public static class DummyDictionaryAsMap extends AbstractMap {
private final Dictionary dictionary;
public DummyDictionaryAsMap(Dictionary dictionary) {
this.dictionary = dictionary;
}
@Override
public Object put(Object key, Object value) {
return dictionary.put(key, value);
}
public Set entrySet() {
throw new UnsupportedOperationException();
}
}
}
| true | true | protected Object internalCreate(Type expectedType, boolean lazyRefAllowed) throws ConstructionException {
Class mapType = getType(expectedType);
if (!RecipeHelper.hasDefaultConstructor(mapType)) {
throw new ConstructionException("Type does not have a default constructor " + mapType.getName());
}
Object o;
try {
o = mapType.newInstance();
} catch (Exception e) {
throw new ConstructionException("Error while creating set instance: " + mapType.getName());
}
Map instance;
if (o instanceof Map) {
instance = (Map) o;
} else if (o instanceof Dictionary) {
instance = new DummyDictionaryAsMap((Dictionary) o);
} else {
throw new ConstructionException("Specified map type does not implement the Map interface: " + mapType.getName());
}
// get component type
Type keyType = Object.class;
Type valueType = Object.class;
Type[] typeParameters = RecipeHelper.getTypeParameters(Collection.class, expectedType);
if (typeParameters != null && typeParameters.length == 2) {
if (typeParameters[0] instanceof Class) {
keyType = typeParameters[0];
}
if (typeParameters[1] instanceof Class) {
valueType = typeParameters[1];
}
}
// add to execution context if name is specified
if (getName() != null) {
ExecutionContext.getContext().addObject(getName(), instance);
}
// add map entries
boolean refAllowed = options.contains(Option.LAZY_ASSIGNMENT);
for (Object[] entry : entries) {
Object key = RecipeHelper.convert(keyType, entry[0], refAllowed);
Object value = RecipeHelper.convert(valueType, entry[1], refAllowed);
if (key instanceof Reference) {
// when the key reference and optional value reference are both resolved
// the key/value pair will be added to the map
Reference.Action action = new UpdateMap(instance, key, value);
((Reference) key).setAction(action);
if (value instanceof Reference) {
((Reference) value).setAction(action);
}
} else if (value instanceof Reference) {
// add a null place holder assigned to the key
//noinspection unchecked
instance.put(key, null);
                // when the value is resolved we will replace the null value with the real value
Reference.Action action = new UpdateValue(instance, key);
((Reference) value).setAction(action);
} else {
//noinspection unchecked
instance.put(key, value);
}
}
return instance;
}
| protected Object internalCreate(Type expectedType, boolean lazyRefAllowed) throws ConstructionException {
Class mapType = getType(expectedType);
if (!RecipeHelper.hasDefaultConstructor(mapType)) {
throw new ConstructionException("Type does not have a default constructor " + mapType.getName());
}
Object o;
try {
o = mapType.newInstance();
} catch (Exception e) {
throw new ConstructionException("Error while creating set instance: " + mapType.getName());
}
Map instance;
if (o instanceof Map) {
instance = (Map) o;
} else if (o instanceof Dictionary) {
instance = new DummyDictionaryAsMap((Dictionary) o);
} else {
throw new ConstructionException("Specified map type does not implement the Map interface: " + mapType.getName());
}
// get component type
Type keyType = Object.class;
Type valueType = Object.class;
Type[] typeParameters = RecipeHelper.getTypeParameters(Map.class, expectedType);
if (typeParameters != null && typeParameters.length == 2) {
if (typeParameters[0] instanceof Class) {
keyType = typeParameters[0];
}
if (typeParameters[1] instanceof Class) {
valueType = typeParameters[1];
}
}
// add to execution context if name is specified
if (getName() != null) {
ExecutionContext.getContext().addObject(getName(), instance);
}
// add map entries
boolean refAllowed = options.contains(Option.LAZY_ASSIGNMENT);
for (Object[] entry : entries) {
Object key = RecipeHelper.convert(keyType, entry[0], refAllowed);
Object value = RecipeHelper.convert(valueType, entry[1], refAllowed);
if (key instanceof Reference) {
// when the key reference and optional value reference are both resolved
// the key/value pair will be added to the map
Reference.Action action = new UpdateMap(instance, key, value);
((Reference) key).setAction(action);
if (value instanceof Reference) {
((Reference) value).setAction(action);
}
} else if (value instanceof Reference) {
// add a null place holder assigned to the key
//noinspection unchecked
instance.put(key, null);
                // when the value is resolved we will replace the null value with the real value
Reference.Action action = new UpdateValue(instance, key);
((Reference) value).setAction(action);
} else {
//noinspection unchecked
instance.put(key, value);
}
}
return instance;
}
|
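// --- Illustrative sketch (separate from the record above) ------------------
// The one-line MapRecipe fix swaps Collection.class for Map.class when asking
// for type parameters. The standalone snippet below uses plain JDK reflection
// (standing in for RecipeHelper.getTypeParameters, which is an xbean helper not
// shown here) to recover the two type arguments of a Map type: exactly the
// key/value pair that internalCreate's length == 2 check expects, and which a
// Collection view of the same type could never supply.
import java.lang.reflect.ParameterizedType;
import java.lang.reflect.Type;
import java.util.Arrays;
import java.util.Map;

class MapTypeParamsSketch {

    // The field's generic type plays the role of 'expectedType'.
    static Map<String, Integer> scores;

    public static void main(String[] args) throws Exception {
        Type expected = MapTypeParamsSketch.class.getDeclaredField("scores").getGenericType();
        ParameterizedType pt = (ParameterizedType) expected;
        // Prints the raw Map interface and its two type arguments (String, Integer).
        System.out.println(pt.getRawType());
        System.out.println(Arrays.toString(pt.getActualTypeArguments()));
    }
}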
diff --git a/EasyMobArmory/src/main/java/com/runetooncraft/plugins/EasyMobArmory/EMAListener.java b/EasyMobArmory/src/main/java/com/runetooncraft/plugins/EasyMobArmory/EMAListener.java
index c95ef2e..99f7d54 100644
--- a/EasyMobArmory/src/main/java/com/runetooncraft/plugins/EasyMobArmory/EMAListener.java
+++ b/EasyMobArmory/src/main/java/com/runetooncraft/plugins/EasyMobArmory/EMAListener.java
@@ -1,427 +1,429 @@
package com.runetooncraft.plugins.EasyMobArmory;
import java.util.ArrayList;
import java.util.List;
import java.util.HashMap;
import net.minecraft.server.v1_6_R2.Item;
import net.minecraft.server.v1_6_R2.NBTTagCompound;
import net.minecraft.server.v1_6_R2.TileEntityChest;
import org.bukkit.Bukkit;
import org.bukkit.ChatColor;
import org.bukkit.Location;
import org.bukkit.Material;
import org.bukkit.block.Block;
import org.bukkit.craftbukkit.v1_6_R2.inventory.CraftInventory;
import org.bukkit.craftbukkit.v1_6_R2.inventory.CraftItemStack;
import org.bukkit.entity.Chicken;
import org.bukkit.entity.Cow;
import org.bukkit.entity.Creeper;
import org.bukkit.entity.Entity;
import org.bukkit.entity.EntityType;
import org.bukkit.entity.Horse;
import org.bukkit.entity.Pig;
import org.bukkit.entity.PigZombie;
import org.bukkit.entity.Player;
import org.bukkit.entity.Sheep;
import org.bukkit.entity.Skeleton;
import org.bukkit.entity.Spider;
import org.bukkit.entity.Zombie;
import org.bukkit.event.EventHandler;
import org.bukkit.event.Listener;
import org.bukkit.event.entity.CreatureSpawnEvent;
import org.bukkit.event.entity.CreatureSpawnEvent.SpawnReason;
import org.bukkit.event.inventory.InventoryClickEvent;
import org.bukkit.event.inventory.InventoryCloseEvent;
import org.bukkit.event.player.PlayerInteractEntityEvent;
import org.bukkit.event.player.PlayerInteractEvent;
import org.bukkit.inventory.DoubleChestInventory;
import org.bukkit.inventory.Inventory;
import org.bukkit.inventory.InventoryHolder;
import org.bukkit.inventory.ItemStack;
import org.bukkit.inventory.meta.ItemMeta;
import org.bukkit.inventory.meta.SkullMeta;
import com.bergerkiller.bukkit.common.entity.CommonEntity;
import com.runetooncraft.plugins.EasyMobArmory.SpawnerHandler.SpawnerCache;
import com.runetooncraft.plugins.EasyMobArmory.SpawnerHandler.SpawnerHandler;
import com.runetooncraft.plugins.EasyMobArmory.core.Config;
import com.runetooncraft.plugins.EasyMobArmory.core.InventorySerializer;
import com.runetooncraft.plugins.EasyMobArmory.core.Messenger;
import com.runetooncraft.plugins.EasyMobArmory.egghandler.EggHandler;
public class EMAListener implements Listener {
Config config;
public static HashMap<Player, Entity> PlayerMobDataMap = new HashMap<Player, Entity>();
public static HashMap<Player, Boolean> Armoryenabled = new HashMap<Player, Boolean>();
public static HashMap<Player, SpawnerCache> SpawnerSelected = new HashMap<Player, SpawnerCache>();
public EMAListener(Config config) {
this.config = config;
}
@EventHandler
public void OnPlayerEntityInteract(PlayerInteractEntityEvent event) {
Entity e = event.getRightClicked();
Player p = event.getPlayer();
if(Armoryenabled.get(p) != null){ if(Armoryenabled.get(p)) {
if(e.getType().equals(EntityType.ZOMBIE)) {
ItemStack i = p.getItemInHand();
Zombie z = (Zombie) e;
if(EMA.Helmets.contains(i)) {
z.getEquipment().setHelmet(i);
}else if(EMA.Chestplates.contains(i)) {
z.getEquipment().setChestplate(i);
}else if(EMA.Leggings.contains(i)) {
z.getEquipment().setLeggings(i);
}else if(EMA.Boots.contains(i)) {
z.getEquipment().setBoots(i);
}else if(i.getType().equals(Material.BONE)){
Inventory inv = Bukkit.createInventory(p, 9, "zombieinv");
ItemStack[] zombieinv = z.getEquipment().getArmorContents();
inv.setContents(zombieinv);
inv.setItem(4, z.getEquipment().getItemInHand());
if(z.isBaby()) inv.setItem(5, new ItemStack(Material.REDSTONE));
inv.setItem(8, EggHandler.GetEggitem(e,ChatColor.GOLD + "Get Mob Egg",ChatColor.AQUA + e.getType().getName()));
p.openInventory(inv);
PlayerMobDataMap.put(p, z);
}else{
z.getEquipment().setItemInHand(i);
}
}else if(e.getType().equals(EntityType.SKELETON)) {
ItemStack i = p.getItemInHand();
Skeleton s = (Skeleton) e;
if(EMA.Helmets.contains(i)) {
s.getEquipment().setHelmet(i);
}else if(EMA.Chestplates.contains(i)) {
s.getEquipment().setChestplate(i);
}else if(EMA.Leggings.contains(i)) {
s.getEquipment().setLeggings(i);
}else if(EMA.Boots.contains(i)) {
s.getEquipment().setBoots(i);
}else if(i.getType().equals(Material.BONE)){
Inventory inv = Bukkit.createInventory(p, 9, "skeletoninv");
ItemStack[] skeletoninv = s.getEquipment().getArmorContents();
inv.setContents(skeletoninv);
inv.setItem(4, s.getEquipment().getItemInHand());
inv.setItem(8, EggHandler.GetEggitem(e,ChatColor.GOLD + "Get Mob Egg",ChatColor.AQUA + e.getType().getName()));
p.openInventory(inv);
PlayerMobDataMap.put(p, s);
}else{
s.getEquipment().setItemInHand(i);
}
}else if(e.getType().equals(EntityType.PIG_ZOMBIE)) {
ItemStack i = p.getItemInHand();
PigZombie pz = (PigZombie) e;
if(EMA.Helmets.contains(i)) {
pz.getEquipment().setHelmet(i);
}else if(EMA.Chestplates.contains(i)) {
pz.getEquipment().setChestplate(i);
}else if(EMA.Leggings.contains(i)) {
pz.getEquipment().setLeggings(i);
}else if(EMA.Boots.contains(i)) {
pz.getEquipment().setBoots(i);
}else if(i.getType().equals(Material.BONE)){
Inventory inv = Bukkit.createInventory(p, 9, "pigzombieinv");
ItemStack[] pigzombieinv = pz.getEquipment().getArmorContents();
inv.setContents(pigzombieinv);
inv.setItem(4, pz.getEquipment().getItemInHand());
if(pz.isBaby()) inv.setItem(5, new ItemStack(Material.REDSTONE));
inv.setItem(8, EggHandler.GetEggitem(e,ChatColor.GOLD + "Get Mob Egg",ChatColor.AQUA + e.getType().getName()));
p.openInventory(inv);
PlayerMobDataMap.put(p, pz);
}else{
pz.getEquipment().setItemInHand(i);
}
}else if(e.getType().equals(EntityType.SHEEP)) {
ItemStack i = p.getItemInHand();
Sheep sh = (Sheep) e;
if(i.getType().equals(Material.BONE)) {
Inventory inv = Bukkit.createInventory(p, 9, "sheepinv");
if(!sh.isAdult()) inv.setItem(5, new ItemStack(Material.REDSTONE));
if(sh.isSheared()) inv.setItem(6, new ItemStack(Material.SHEARS));
if(sh.getAgeLock()) inv.setItem(7, new ItemStack(Material.GLOWSTONE_DUST));
inv.setItem(8, EggHandler.GetEggitem(e,ChatColor.GOLD + "Get Mob Egg",ChatColor.AQUA + e.getType().getName()));
p.openInventory(inv);
PlayerMobDataMap.put(p, sh);
}
}else if(e.getType().equals(EntityType.PIG)) {
ItemStack i = p.getItemInHand();
Pig pig = (Pig) e;
if(i.getType().equals(Material.BONE)) {
Inventory inv = Bukkit.createInventory(p, 9, "piginv");
if(!pig.isAdult()) inv.setItem(5, new ItemStack(Material.REDSTONE));
if(pig.hasSaddle()) inv.setItem(6, new ItemStack(Material.SADDLE));
if(pig.getAgeLock()) inv.setItem(7, new ItemStack(Material.GLOWSTONE_DUST));
inv.setItem(8, EggHandler.GetEggitem(e,ChatColor.GOLD + "Get Mob Egg",ChatColor.AQUA + e.getType().getName()));
p.openInventory(inv);
PlayerMobDataMap.put(p, pig);
}
}else if(e.getType().equals(EntityType.CHICKEN)) {
ItemStack i = p.getItemInHand();
Chicken c = (Chicken) e;
if(i.getType().equals(Material.BONE)) {
Inventory inv = Bukkit.createInventory(p, 9, "chickeninv");
if(!c.isAdult()) inv.setItem(5, new ItemStack(Material.REDSTONE));
if(c.getAgeLock()) inv.setItem(7, new ItemStack(Material.GLOWSTONE_DUST));
inv.setItem(8, EggHandler.GetEggitem(e,ChatColor.GOLD + "Get Mob Egg",ChatColor.AQUA + e.getType().getName()));
p.openInventory(inv);
PlayerMobDataMap.put(p, c);
}
}else if(e.getType().equals(EntityType.COW)) {
ItemStack i = p.getItemInHand();
Cow cow = (Cow) e;
if(i.getType().equals(Material.BONE)) {
Inventory inv = Bukkit.createInventory(p, 9, "cowinv");
if(!cow.isAdult()) inv.setItem(5, new ItemStack(Material.REDSTONE));
if(cow.getAgeLock()) inv.setItem(7, new ItemStack(Material.GLOWSTONE_DUST));
inv.setItem(8, EggHandler.GetEggitem(e,ChatColor.GOLD + "Get Mob Egg",ChatColor.AQUA + e.getType().getName()));
p.openInventory(inv);
PlayerMobDataMap.put(p, cow);
}
}else if(e.getType().equals(EntityType.CREEPER)) {
ItemStack i = p.getItemInHand();
Creeper c = (Creeper) e;
if(i.getType().equals(Material.BONE)) {
Inventory inv = Bukkit.createInventory(p, 9, "creeperinv");
if(c.isPowered()) inv.setItem(0, new ItemStack(Material.REDSTONE));
inv.setItem(8, EggHandler.GetEggitem(e,ChatColor.GOLD + "Get Mob Egg",ChatColor.AQUA + e.getType().getName()));
p.openInventory(inv);
PlayerMobDataMap.put(p, c);
}
}else if(e.getType().equals(EntityType.HORSE)) {
ItemStack i = p.getItemInHand();
Horse h = (Horse) e;
if(i.getType().equals(Material.BONE)) {
Inventory inv = Bukkit.createInventory(p, 9, "horseinv");
if(!h.isAdult()) inv.setItem(5, new ItemStack(Material.REDSTONE));
if(h.isTamed()) inv.setItem(6, new ItemStack(Material.HAY_BLOCK));
if(h.isTamed()) {
Player owner = (Player) h.getOwner();
inv.setItem(7, setOwner(new ItemStack(Material.SKULL_ITEM, 1, (short)3), p.getName()));
}
if(h.isCarryingChest()) inv.setItem(7, new ItemStack(Material.CHEST));
// inv.setItem(8, EggHandler.GetEggitem(e,ChatColor.GOLD + "Get Mob Egg",ChatColor.AQUA + e.getType().getName()));
p.openInventory(inv);
PlayerMobDataMap.put(p, h);
}
}
}}
}
@EventHandler
public void OnInventoryCloseEvent(InventoryCloseEvent event) {
if(Armoryenabled.get(event.getPlayer()) != null){ if(Armoryenabled.get(event.getPlayer())) {
if(event.getInventory().getName().equals("zombieinv")) {
Inventory i = event.getInventory();
Zombie z = (Zombie) PlayerMobDataMap.get(event.getPlayer());
z.getEquipment().setHelmet(i.getItem(3));
z.getEquipment().setChestplate(i.getItem(2));
z.getEquipment().setLeggings(i.getItem(1));
z.getEquipment().setBoots(i.getItem(0));
z.getEquipment().setItemInHand(i.getItem(4));
if(i.contains(Material.REDSTONE)) {
z.setBaby(true);
}else{
z.setBaby(false);
}
}
else if(event.getInventory().getName().equals("skeletoninv")) {
Inventory i = event.getInventory();
Skeleton s = (Skeleton) PlayerMobDataMap.get(event.getPlayer());
s.getEquipment().setHelmet(i.getItem(3));
s.getEquipment().setChestplate(i.getItem(2));
s.getEquipment().setLeggings(i.getItem(1));
s.getEquipment().setBoots(i.getItem(0));
s.getEquipment().setItemInHand(i.getItem(4));
}
else if(event.getInventory().getName().equals("pigzombieinv")) {
Inventory i = event.getInventory();
PigZombie pz = (PigZombie) PlayerMobDataMap.get(event.getPlayer());
pz.getEquipment().setHelmet(i.getItem(3));
pz.getEquipment().setChestplate(i.getItem(2));
pz.getEquipment().setLeggings(i.getItem(1));
pz.getEquipment().setBoots(i.getItem(0));
pz.getEquipment().setItemInHand(i.getItem(4));
if(i.contains(Material.REDSTONE)) {
pz.setBaby(true);
}else{
pz.setBaby(false);
}
}
else if(event.getInventory().getName().equals("sheepinv")) {
Inventory i = event.getInventory();
Sheep sh = (Sheep) PlayerMobDataMap.get(event.getPlayer());
if(i.contains(Material.REDSTONE)) {
sh.setBaby();
}else{
sh.setAdult();
}
if(i.contains(Material.SHEARS)) {
sh.setSheared(true);
}else{
sh.setSheared(false);
}
if(i.contains(Material.GLOWSTONE_DUST)) {
sh.setAgeLock(true);
}else{
sh.setAgeLock(false);
}
}
else if(event.getInventory().getName().equals("piginv")) {
Inventory i = event.getInventory();
Pig pig = (Pig) PlayerMobDataMap.get(event.getPlayer());
if(i.contains(Material.REDSTONE)) {
pig.setBaby();
}else{
pig.setAdult();
}
if(i.contains(Material.SADDLE)) {
pig.setSaddle(true);
}else{
pig.setSaddle(false);
}
if(i.contains(Material.GLOWSTONE_DUST)) {
pig.setAgeLock(true);
}else{
pig.setAgeLock(false);
}
}
else if(event.getInventory().getName().equals("cowinv")) {
Inventory i = event.getInventory();
Cow cow = (Cow) PlayerMobDataMap.get(event.getPlayer());
if(i.contains(Material.REDSTONE)) {
cow.setBaby();
}else{
cow.setAdult();
}
if(i.contains(Material.GLOWSTONE_DUST)) {
cow.setAgeLock(true);
}else{
cow.setAgeLock(false);
}
}
else if(event.getInventory().getName().equals("chickeninv")) {
Inventory i = event.getInventory();
Chicken c = (Chicken) PlayerMobDataMap.get(event.getPlayer());
if(i.contains(Material.REDSTONE)) {
c.setBaby();
}else{
c.setAdult();
}
if(i.contains(Material.GLOWSTONE_DUST)) {
c.setAgeLock(true);
}else{
c.setAgeLock(false);
}
}
else if(event.getInventory().getName().equals("creeperinv")) {
Inventory i = event.getInventory();
Creeper c = (Creeper) PlayerMobDataMap.get(event.getPlayer());
if(i.contains(Material.REDSTONE)) {
c.setPowered(true);
}else{
c.setPowered(false);
}
}
else if(event.getInventory().getName().equals("horseinv")) {
Inventory i = event.getInventory();
Horse h = (Horse) PlayerMobDataMap.get(event.getPlayer());
if(i.contains(Material.REDSTONE)) {
h.setBaby();
}else{
h.setAdult();
}
if(i.contains(Material.HAY_BLOCK)) {
h.setTamed(true);
if(i.contains(Material.SKULL_ITEM)) {
ItemStack head = i.getItem(i.first(Material.SKULL_ITEM));
Player owner = getOwner(head);
if(owner == null) {
h.setOwner(event.getPlayer());
}else{
h.setOwner(owner);
}
}else{
h.setOwner(event.getPlayer());
}
}else{
h.setTamed(false);
}
}
else if(event.getInventory().getName().equals("Spawnerinv")) {
Inventory i = event.getInventory();
SpawnerCache sc = SpawnerSelected.get(event.getPlayer());
ItemStack[] InvItems = i.getContents();
Inventory NewInv = Bukkit.createInventory(event.getPlayer(), 54, "Spawnerinv");
for(ItemStack is : InvItems) {
- if(is.getType().equals(Material.MONSTER_EGG) && is.hasItemMeta() && is.getItemMeta().hasDisplayName() && is.getItemMeta().getDisplayName().contains(":")) {
+ if(is.getType().equals(Material.MONSTER_EGG) && is.hasItemMeta()) {
+ if(is.getItemMeta().hasDisplayName() && is.getItemMeta().getDisplayName().contains(":")) {
NewInv.addItem(is);
+ }
}
}
}
}}
}
public ItemStack setOwner(ItemStack item, String owner) {
SkullMeta meta = (SkullMeta) item.getItemMeta();
meta.setOwner(owner);
item.setItemMeta(meta);
return item;
}
public Player getOwner(ItemStack item) {
SkullMeta meta = (SkullMeta) item.getItemMeta();
if(meta.getOwner() !=null) {
return (Player) Bukkit.getOfflinePlayer(meta.getOwner());
}else{
return null;
}
}
@EventHandler
public void OnInventoryClick(InventoryClickEvent event) {
String name = event.getInventory().getName();
if(name.equals("zombieinv") || name.equals("skeletoninv") || name.equals("pigzombieinv") || name.equals("sheepinv") || name.equals("piginv") || name.equals("cowinv") || name.equals("horseinv") || name.equals("chickeninv") || name.equals("creeperinv")) {
if(event.getSlot() == 8 && event.getCurrentItem().getType() == Material.MONSTER_EGG && event.getCurrentItem().getItemMeta().hasDisplayName() && event.getInventory().getItem(8).equals(event.getCurrentItem())){
Player p = (Player) event.getWhoClicked();
Entity e = PlayerMobDataMap.get(p);
EggHandler.addegg(e);
ItemStack eggitem = EggHandler.GetEggitem(e, "EMA Egg id: " + e.getEntityId(),ChatColor.AQUA + e.getType().getName());
event.getCurrentItem().setItemMeta(eggitem.getItemMeta());
}
}
}
@EventHandler
public void OnPlayerInteract(PlayerInteractEvent event) {
Player p = event.getPlayer();
if(Armoryenabled.get(p) != null && Armoryenabled.get(p)) {
if(event.getPlayer().getItemInHand().getType().equals(Material.MONSTER_EGG) && event.hasBlock()) {
ItemStack egg = p.getItemInHand();
ItemMeta eggmeta = egg.getItemMeta();
if(eggmeta.hasDisplayName() && eggmeta.getDisplayName().contains(": ")) {
String[] name = eggmeta.getDisplayName().split(": ");
if(name.length == 2) {
if(EggHandler.GetEggList().contains(name[1])) {
Location loc = event.getClickedBlock().getLocation();
loc.setY(loc.getY() + 1);
Entity entity = EggHandler.Loadentity(name[1],loc);
}else{
}
}
}
}else if(event.getPlayer().getItemInHand().getType().equals(Material.BONE) && event.hasBlock() && event.getClickedBlock().getTypeId() == 52) {
Block b = event.getClickedBlock();
Location BlockLocation = b.getLocation();
if(SpawnerHandler.IsEMASpawner(BlockLocation).equals(false)) {
Messenger.playermessage("EMASpawer created", p);
SpawnerHandler.NewEMASpawner(b, p);
SpawnerHandler.OpenSpawnerInventory(b, p);
}else{
SpawnerHandler.OpenSpawnerInventory(b, p);
}
SpawnerSelected.put(p, SpawnerHandler.getSpawner(b.getLocation()));
}
}
}
}
| false | true | public void OnInventoryCloseEvent(InventoryCloseEvent event) {
if(Armoryenabled.get(event.getPlayer()) != null){ if(Armoryenabled.get(event.getPlayer())) {
if(event.getInventory().getName().equals("zombieinv")) {
Inventory i = event.getInventory();
Zombie z = (Zombie) PlayerMobDataMap.get(event.getPlayer());
z.getEquipment().setHelmet(i.getItem(3));
z.getEquipment().setChestplate(i.getItem(2));
z.getEquipment().setLeggings(i.getItem(1));
z.getEquipment().setBoots(i.getItem(0));
z.getEquipment().setItemInHand(i.getItem(4));
if(i.contains(Material.REDSTONE)) {
z.setBaby(true);
}else{
z.setBaby(false);
}
}
else if(event.getInventory().getName().equals("skeletoninv")) {
Inventory i = event.getInventory();
Skeleton s = (Skeleton) PlayerMobDataMap.get(event.getPlayer());
s.getEquipment().setHelmet(i.getItem(3));
s.getEquipment().setChestplate(i.getItem(2));
s.getEquipment().setLeggings(i.getItem(1));
s.getEquipment().setBoots(i.getItem(0));
s.getEquipment().setItemInHand(i.getItem(4));
}
else if(event.getInventory().getName().equals("pigzombieinv")) {
Inventory i = event.getInventory();
PigZombie pz = (PigZombie) PlayerMobDataMap.get(event.getPlayer());
pz.getEquipment().setHelmet(i.getItem(3));
pz.getEquipment().setChestplate(i.getItem(2));
pz.getEquipment().setLeggings(i.getItem(1));
pz.getEquipment().setBoots(i.getItem(0));
pz.getEquipment().setItemInHand(i.getItem(4));
if(i.contains(Material.REDSTONE)) {
pz.setBaby(true);
}else{
pz.setBaby(false);
}
}
else if(event.getInventory().getName().equals("sheepinv")) {
Inventory i = event.getInventory();
Sheep sh = (Sheep) PlayerMobDataMap.get(event.getPlayer());
if(i.contains(Material.REDSTONE)) {
sh.setBaby();
}else{
sh.setAdult();
}
if(i.contains(Material.SHEARS)) {
sh.setSheared(true);
}else{
sh.setSheared(false);
}
if(i.contains(Material.GLOWSTONE_DUST)) {
sh.setAgeLock(true);
}else{
sh.setAgeLock(false);
}
}
else if(event.getInventory().getName().equals("piginv")) {
Inventory i = event.getInventory();
Pig pig = (Pig) PlayerMobDataMap.get(event.getPlayer());
if(i.contains(Material.REDSTONE)) {
pig.setBaby();
}else{
pig.setAdult();
}
if(i.contains(Material.SADDLE)) {
pig.setSaddle(true);
}else{
pig.setSaddle(false);
}
if(i.contains(Material.GLOWSTONE_DUST)) {
pig.setAgeLock(true);
}else{
pig.setAgeLock(false);
}
}
else if(event.getInventory().getName().equals("cowinv")) {
Inventory i = event.getInventory();
Cow cow = (Cow) PlayerMobDataMap.get(event.getPlayer());
if(i.contains(Material.REDSTONE)) {
cow.setBaby();
}else{
cow.setAdult();
}
if(i.contains(Material.GLOWSTONE_DUST)) {
cow.setAgeLock(true);
}else{
cow.setAgeLock(false);
}
}
else if(event.getInventory().getName().equals("chickeninv")) {
Inventory i = event.getInventory();
Chicken c = (Chicken) PlayerMobDataMap.get(event.getPlayer());
if(i.contains(Material.REDSTONE)) {
c.setBaby();
}else{
c.setAdult();
}
if(i.contains(Material.GLOWSTONE_DUST)) {
c.setAgeLock(true);
}else{
c.setAgeLock(false);
}
}
else if(event.getInventory().getName().equals("creeperinv")) {
Inventory i = event.getInventory();
Creeper c = (Creeper) PlayerMobDataMap.get(event.getPlayer());
if(i.contains(Material.REDSTONE)) {
c.setPowered(true);
}else{
c.setPowered(false);
}
}
else if(event.getInventory().getName().equals("horseinv")) {
Inventory i = event.getInventory();
Horse h = (Horse) PlayerMobDataMap.get(event.getPlayer());
if(i.contains(Material.REDSTONE)) {
h.setBaby();
}else{
h.setAdult();
}
if(i.contains(Material.HAY_BLOCK)) {
h.setTamed(true);
if(i.contains(Material.SKULL_ITEM)) {
ItemStack head = i.getItem(i.first(Material.SKULL_ITEM));
Player owner = getOwner(head);
if(owner == null) {
h.setOwner(event.getPlayer());
}else{
h.setOwner(owner);
}
}else{
h.setOwner(event.getPlayer());
}
}else{
h.setTamed(false);
}
}
else if(event.getInventory().getName().equals("Spawnerinv")) {
Inventory i = event.getInventory();
SpawnerCache sc = SpawnerSelected.get(event.getPlayer());
ItemStack[] InvItems = i.getContents();
Inventory NewInv = Bukkit.createInventory(event.getPlayer(), 54, "Spawnerinv");
for(ItemStack is : InvItems) {
if(is.getType().equals(Material.MONSTER_EGG) && is.hasItemMeta() && is.getItemMeta().hasDisplayName() && is.getItemMeta().getDisplayName().contains(":")) {
NewInv.addItem(is);
}
}
}
}}
}
| public void OnInventoryCloseEvent(InventoryCloseEvent event) {
if(Armoryenabled.get(event.getPlayer()) != null){ if(Armoryenabled.get(event.getPlayer())) {
if(event.getInventory().getName().equals("zombieinv")) {
Inventory i = event.getInventory();
Zombie z = (Zombie) PlayerMobDataMap.get(event.getPlayer());
z.getEquipment().setHelmet(i.getItem(3));
z.getEquipment().setChestplate(i.getItem(2));
z.getEquipment().setLeggings(i.getItem(1));
z.getEquipment().setBoots(i.getItem(0));
z.getEquipment().setItemInHand(i.getItem(4));
if(i.contains(Material.REDSTONE)) {
z.setBaby(true);
}else{
z.setBaby(false);
}
}
else if(event.getInventory().getName().equals("skeletoninv")) {
Inventory i = event.getInventory();
Skeleton s = (Skeleton) PlayerMobDataMap.get(event.getPlayer());
s.getEquipment().setHelmet(i.getItem(3));
s.getEquipment().setChestplate(i.getItem(2));
s.getEquipment().setLeggings(i.getItem(1));
s.getEquipment().setBoots(i.getItem(0));
s.getEquipment().setItemInHand(i.getItem(4));
}
else if(event.getInventory().getName().equals("pigzombieinv")) {
Inventory i = event.getInventory();
PigZombie pz = (PigZombie) PlayerMobDataMap.get(event.getPlayer());
pz.getEquipment().setHelmet(i.getItem(3));
pz.getEquipment().setChestplate(i.getItem(2));
pz.getEquipment().setLeggings(i.getItem(1));
pz.getEquipment().setBoots(i.getItem(0));
pz.getEquipment().setItemInHand(i.getItem(4));
if(i.contains(Material.REDSTONE)) {
pz.setBaby(true);
}else{
pz.setBaby(false);
}
}
else if(event.getInventory().getName().equals("sheepinv")) {
Inventory i = event.getInventory();
Sheep sh = (Sheep) PlayerMobDataMap.get(event.getPlayer());
if(i.contains(Material.REDSTONE)) {
sh.setBaby();
}else{
sh.setAdult();
}
if(i.contains(Material.SHEARS)) {
sh.setSheared(true);
}else{
sh.setSheared(false);
}
if(i.contains(Material.GLOWSTONE_DUST)) {
sh.setAgeLock(true);
}else{
sh.setAgeLock(false);
}
}
else if(event.getInventory().getName().equals("piginv")) {
Inventory i = event.getInventory();
Pig pig = (Pig) PlayerMobDataMap.get(event.getPlayer());
if(i.contains(Material.REDSTONE)) {
pig.setBaby();
}else{
pig.setAdult();
}
if(i.contains(Material.SADDLE)) {
pig.setSaddle(true);
}else{
pig.setSaddle(false);
}
if(i.contains(Material.GLOWSTONE_DUST)) {
pig.setAgeLock(true);
}else{
pig.setAgeLock(false);
}
}
else if(event.getInventory().getName().equals("cowinv")) {
Inventory i = event.getInventory();
Cow cow = (Cow) PlayerMobDataMap.get(event.getPlayer());
if(i.contains(Material.REDSTONE)) {
cow.setBaby();
}else{
cow.setAdult();
}
if(i.contains(Material.GLOWSTONE_DUST)) {
cow.setAgeLock(true);
}else{
cow.setAgeLock(false);
}
}
else if(event.getInventory().getName().equals("chickeninv")) {
Inventory i = event.getInventory();
Chicken c = (Chicken) PlayerMobDataMap.get(event.getPlayer());
if(i.contains(Material.REDSTONE)) {
c.setBaby();
}else{
c.setAdult();
}
if(i.contains(Material.GLOWSTONE_DUST)) {
c.setAgeLock(true);
}else{
c.setAgeLock(false);
}
}
else if(event.getInventory().getName().equals("creeperinv")) {
Inventory i = event.getInventory();
Creeper c = (Creeper) PlayerMobDataMap.get(event.getPlayer());
if(i.contains(Material.REDSTONE)) {
c.setPowered(true);
}else{
c.setPowered(false);
}
}
else if(event.getInventory().getName().equals("horseinv")) {
Inventory i = event.getInventory();
Horse h = (Horse) PlayerMobDataMap.get(event.getPlayer());
if(i.contains(Material.REDSTONE)) {
h.setBaby();
}else{
h.setAdult();
}
if(i.contains(Material.HAY_BLOCK)) {
h.setTamed(true);
if(i.contains(Material.SKULL_ITEM)) {
ItemStack head = i.getItem(i.first(Material.SKULL_ITEM));
Player owner = getOwner(head);
if(owner == null) {
h.setOwner(event.getPlayer());
}else{
h.setOwner(owner);
}
}else{
h.setOwner(event.getPlayer());
}
}else{
h.setTamed(false);
}
}
else if(event.getInventory().getName().equals("Spawnerinv")) {
Inventory i = event.getInventory();
SpawnerCache sc = SpawnerSelected.get(event.getPlayer());
ItemStack[] InvItems = i.getContents();
Inventory NewInv = Bukkit.createInventory(event.getPlayer(), 54, "Spawnerinv");
for(ItemStack is : InvItems) {
if(is.getType().equals(Material.MONSTER_EGG) && is.hasItemMeta()) {
if(is.getItemMeta().hasDisplayName() && is.getItemMeta().getDisplayName().contains(":")) {
NewInv.addItem(is);
}
}
}
}
}}
}
|
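// --- Illustrative sketch (separate from the record above) ------------------
// The EMAListener fix restructures the monster-egg filter into nested ifs; with
// Java's short-circuiting && the two shapes behave the same, so this sketch
// focuses on the guards that actually matter when copying inventory contents:
// skipping the null entries Bukkit uses for empty slots and checking for item
// meta before reading a display name. ItemStub is a hypothetical stand-in for
// the Bukkit ItemStack/ItemMeta pair, not the real API.
import java.util.ArrayList;
import java.util.List;

class GuardedEggFilterSketch {

    static final class ItemStub {
        private final String displayName; // null: no meta/display name set (stub simplification)
        ItemStub(String displayName) { this.displayName = displayName; }
        boolean hasItemMeta() { return displayName != null; }
        String getDisplayName() { return displayName; }
    }

    // Keeps only items following the "<mob>: <id>" naming convention used above.
    static List<ItemStub> keepNamedEggs(ItemStub[] contents) {
        List<ItemStub> kept = new ArrayList<ItemStub>();
        for (ItemStub is : contents) {
            if (is == null) {
                continue; // empty inventory slot
            }
            if (is.hasItemMeta()) {                      // outer guard, as in the fix
                if (is.getDisplayName().contains(":")) { // inner display-name check
                    kept.add(is);
                }
            }
        }
        return kept;
    }

    public static void main(String[] args) {
        ItemStub[] slots = { new ItemStub("Zombie: 42"), null, new ItemStub(null),
                new ItemStub("plain egg"), new ItemStub("Skeleton: 7") };
        System.out.println(keepNamedEggs(slots).size()); // 2
    }
}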
diff --git a/servers/sip-servlets/sip-servlets-jboss5/src/main/java/org/jboss/web/tomcat/service/deployers/TomcatConvergedDeployment.java b/servers/sip-servlets/sip-servlets-jboss5/src/main/java/org/jboss/web/tomcat/service/deployers/TomcatConvergedDeployment.java
index 2fe6038e7..3dc704241 100644
--- a/servers/sip-servlets/sip-servlets-jboss5/src/main/java/org/jboss/web/tomcat/service/deployers/TomcatConvergedDeployment.java
+++ b/servers/sip-servlets/sip-servlets-jboss5/src/main/java/org/jboss/web/tomcat/service/deployers/TomcatConvergedDeployment.java
@@ -1,464 +1,464 @@
/*
* JBoss, Home of Professional Open Source
* Copyright 2008, Red Hat Middleware LLC, and individual contributors
* by the @authors tag. See the copyright.txt in the distribution for a
* full listing of individual contributors.
*
* This is free software; you can redistribute it and/or modify it
* under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation; either version 2.1 of
* the License, or (at your option) any later version.
*
* This software is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this software; if not, write to the Free
* Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
* 02110-1301 USA, or see the FSF site: http://www.fsf.org.
*/
package org.jboss.web.tomcat.service.deployers;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.net.URL;
import java.security.CodeSource;
import java.security.cert.Certificate;
import java.util.zip.ZipFile;
import javax.management.Attribute;
import javax.management.ObjectName;
import javax.naming.Context;
import javax.naming.InitialContext;
import javax.servlet.sip.SipSessionsUtil;
import javax.servlet.sip.TimerService;
import org.apache.catalina.LifecycleEvent;
import org.apache.catalina.Loader;
import org.apache.catalina.core.StandardContext;
import org.apache.tomcat.util.modeler.Registry;
import org.jboss.deployers.spi.DeploymentException;
import org.jboss.logging.Logger;
import org.jboss.metadata.sip.jboss.JBossConvergedSipMetaData;
import org.jboss.metadata.web.jboss.JBossWebMetaData;
import org.jboss.naming.NonSerializableFactory;
import org.jboss.security.SecurityUtil;
import org.jboss.virtual.VirtualFile;
import org.jboss.web.WebApplication;
import org.jboss.web.tomcat.security.JaccContextValve;
import org.jboss.web.tomcat.security.RunAsListener;
import org.jboss.web.tomcat.security.SecurityAssociationValve;
import org.jboss.web.tomcat.security.SecurityContextEstablishmentValve;
import org.jboss.web.tomcat.service.TomcatConvergedSipInjectionContainer;
import org.jboss.web.tomcat.service.session.AbstractJBossManager;
import org.jboss.web.tomcat.service.session.distributedcache.spi.ClusteringNotSupportedException;
import org.mobicents.servlet.sip.message.SipFactoryFacade;
import org.mobicents.servlet.sip.startup.SipContext;
import org.mobicents.servlet.sip.startup.jboss.SipJBossContextConfig;
/**
 * A Tomcat converged SIP application deployment that can deploy web applications, SIP applications and converged SIP/web applications.
 *
 * It extends the JBoss 5 TomcatDeployment class so that the config class for the context becomes org.mobicents.servlet.sip.startup.jboss.SipJBossContextConfig
 * and a ConvergedEncListener is added to the context to inject the SipFactory, TimerService and SipSessionsUtil into the context's private JNDI.
*
* @author [email protected]
*
*/
public class TomcatConvergedDeployment extends TomcatDeployment {
private static final Logger log = Logger
.getLogger(TomcatConvergedDeployment.class);
/**
* The name of the war level context configuration descriptor
*/
private static final String CONTEXT_CONFIG_FILE = "WEB-INF/context.xml";
public static final String SIP_SUBCONTEXT = "sip";
public static final String SIP_FACTORY_JNDI_NAME = "SipFactory";
public static final String SIP_SESSIONS_UTIL_JNDI_NAME = "SipSessionsUtil";
public static final String TIMER_SERVICE_JNDI_NAME = "TimerService";
protected DeployerConfig config;
private final String[] javaVMs = { " jboss.management.local:J2EEServer=Local,j2eeType=JVM,name=localhost" };
private final String serverName = "jboss";
@Override
public void init(Object containerConfig) throws Exception {
super.init(containerConfig);
this.config = (DeployerConfig) containerConfig;
}
@Override
protected void performDeployInternal(String hostName,
WebApplication webApp, String warUrl) throws Exception {
JBossWebMetaData metaData = webApp.getMetaData();
String ctxPath = metaData.getContextRoot();
if (ctxPath.equals("/") || ctxPath.equals("/ROOT")
|| ctxPath.equals("")) {
log.debug("deploy root context=" + ctxPath);
ctxPath = "/";
metaData.setContextRoot(ctxPath);
}
log.info("deploy, ctxPath=" + ctxPath + ", vfsUrl="
+ webApp.getDeploymentUnit().getFile("").getPathName());
URL url = new URL(warUrl);
ClassLoader loader = Thread.currentThread().getContextClassLoader();
metaData.setContextLoader(loader);
StandardContext context = (StandardContext) Class.forName(
config.getContextClassName()).newInstance();
TomcatConvergedSipInjectionContainer injectionContainer = new TomcatConvergedSipInjectionContainer(
webApp, webApp.getDeploymentUnit(), context,
getPersistenceUnitDependencyResolver());
setInjectionContainer(injectionContainer);
Loader webLoader = webApp.getDeploymentUnit().getAttachment(
Loader.class);
if (webLoader == null)
webLoader = getWebLoader(webApp.getDeploymentUnit(), metaData,
loader, url);
webApp.setName(url.getPath());
webApp.setClassLoader(loader);
webApp.setURL(url);
String objectNameS = config.getCatalinaDomain()
+ ":j2eeType=WebModule,name=//"
+ ((hostName == null) ? "localhost" : hostName) + ctxPath
+ ",J2EEApplication=none,J2EEServer=none";
ObjectName objectName = new ObjectName(objectNameS);
if (Registry.getRegistry(null, null).getMBeanServer().isRegistered(
objectName))
throw new DeploymentException(
"Web mapping already exists for deployment URL " + warUrl);
Registry.getRegistry(null, null).registerComponent(context, objectName,
config.getContextClassName());
if (TomcatService.OLD_CODE) {
String ctxConfig = null;
File warFile = new File(url.getFile());
if (warFile.isDirectory() == false) {
// Using VFS access
VFSDirContext resources = new VFSDirContext();
resources
.setVirtualFile(webApp.getDeploymentUnit().getFile(""));
context.setResources(resources);
// Find META-INF/context.xml
VirtualFile file = webApp.getDeploymentUnit().getFile(
CONTEXT_CONFIG_FILE);
if (file != null) {
// Copy the META-INF/context.xml from the VFS to the temp
// folder
InputStream is = file.openStream();
FileOutputStream fos = null;
try {
byte[] buffer = new byte[512];
int bytes;
						// FIXME: use JBoss' temp folder instead
File tempFile = File.createTempFile("context-", ".xml");
tempFile.deleteOnExit();
fos = new FileOutputStream(tempFile);
while ((bytes = is.read(buffer)) > 0) {
fos.write(buffer, 0, bytes);
}
ctxConfig = tempFile.getAbsolutePath();
} finally {
is.close();
if (fos != null) {
fos.close();
}
}
}
} else {
// Using direct filesystem access: no operation needed
// Find META-INF/context.xml
File webDD = new File(warFile, CONTEXT_CONFIG_FILE);
if (webDD.exists() == true) {
ctxConfig = webDD.getAbsolutePath();
}
}
context.setConfigFile(ctxConfig);
} else {
context.setConfigFile(CONTEXT_CONFIG_FILE);
}
context.setInstanceManager(injectionContainer);
context.setDocBase(url.getFile());
context.setDefaultContextXml("context.xml");
context.setDefaultWebXml("conf/web.xml");
context.setPublicId(metaData.getPublicID());
// If there is an alt-dd set it
if (metaData.getAlternativeDD() != null) {
log.debug("Setting altDDName to: " + metaData.getAlternativeDD());
context.setAltDDName(metaData.getAlternativeDD());
}
context.setJavaVMs(javaVMs);
context.setServer(serverName);
context.setSaveConfig(false);
if (webLoader != null) {
context.setLoader(webLoader);
} else {
context.setParentClassLoader(loader);
}
context.setDelegate(webApp.getJava2ClassLoadingCompliance());
// Javac compatibility whenever possible
String[] jspCP = getCompileClasspath(loader);
StringBuffer classpath = new StringBuffer();
for (int u = 0; u < jspCP.length; u++) {
String repository = jspCP[u];
if (repository == null)
continue;
if (repository.startsWith("file://"))
repository = repository.substring(7);
else if (repository.startsWith("file:"))
repository = repository.substring(5);
else
continue;
if (repository == null)
continue;
			// ok it is a file. Make sure that it is a directory or jar file
File fp = new File(repository);
if (!fp.isDirectory()) {
// if it is not a directory, try to open it as a zipfile.
try {
// avoid opening .xml files
if (fp.getName().toLowerCase().endsWith(".xml"))
continue;
ZipFile zip = new ZipFile(fp);
zip.close();
} catch (IOException e) {
continue;
}
}
if (u > 0)
classpath.append(File.pathSeparator);
classpath.append(repository);
}
context.setCompilerClasspath(classpath.toString());
// Set the session cookies flag according to metadata
switch (metaData.getSessionCookies()) {
case JBossWebMetaData.SESSION_COOKIES_ENABLED:
context.setCookies(true);
log.debug("Enabling session cookies");
break;
case JBossWebMetaData.SESSION_COOKIES_DISABLED:
context.setCookies(false);
log.debug("Disabling session cookies");
break;
default:
log.debug("Using session cookies default setting");
}
String metaDataSecurityDomain = metaData.getSecurityDomain();
if (metaDataSecurityDomain != null)
metaDataSecurityDomain = metaDataSecurityDomain.trim();
		// TODO: add the security valve again with SecurityActions enabled. It was commented out due to an IllegalAccessError; this is a regression from regular JBoss 5
// Add a valve to establish security context
// SecurityContextEstablishmentValve scevalve = new SecurityContextEstablishmentValve(
// metaDataSecurityDomain, SecurityUtil
// .unprefixSecurityDomain(config
// .getDefaultSecurityDomain()), SecurityActions
// .loadClass(config.getSecurityContextClassName()),
// getSecurityManagement());
SecurityContextEstablishmentValve scevalve = new SecurityContextEstablishmentValve(
metaDataSecurityDomain, SecurityUtil
.unprefixSecurityDomain(config
- .getDefaultSecurityDomain()), config.getSecurityContextClassName().getClass(),
+ .getDefaultSecurityDomain()), Class.forName(config.getSecurityContextClassName()),
getSecurityManagement());
context.addValve(scevalve);
		// Add a valve to establish the JACC context before authorization valves
Certificate[] certs = null;
CodeSource cs = new CodeSource(url, certs);
JaccContextValve jaccValve = new JaccContextValve(metaData, cs);
context.addValve(jaccValve);
// Set listener
context.setConfigClass("org.mobicents.servlet.sip.startup.jboss.SipJBossContextConfig");
context.addLifecycleListener(new ConvergedEncListener(hostName, loader, webLoader, webApp));
// Pass the metadata to the RunAsListener via a thread local
RunAsListener.metaDataLocal.set(metaData);
SipJBossContextConfig.metaDataLocal.set(metaData);
SipJBossContextConfig.metaDataShared.set(config.getSharedMetaData());
SipJBossContextConfig.deployerConfig.set(config);
SipJBossContextConfig.kernelLocal.set(kernel);
SipJBossContextConfig.deploymentUnitLocal.set(unit);
try {
// Start it
context.start();
// Build the ENC
} catch (Exception e) {
context.destroy();
DeploymentException.rethrowAsDeploymentException("URL " + warUrl
+ " deployment failed", e);
} finally {
RunAsListener.metaDataLocal.set(null);
SipJBossContextConfig.metaDataLocal.set(null);
SipJBossContextConfig.metaDataShared.set(null);
SipJBossContextConfig.deployerConfig.set(null);
SipJBossContextConfig.kernelLocal.set(null);
SipJBossContextConfig.deploymentUnitLocal.set(null);
}
if (context.getState() != 1) {
context.destroy();
throw new DeploymentException("URL " + warUrl
+ " deployment failed");
}
// Clustering
if (metaData.getDistributable() != null) {
			// Try to initiate clustering; fall back to standard if no clustering is available
try {
AbstractJBossManager manager = null;
String managerClassName = config.getManagerClass();
Class managerClass = Thread.currentThread()
.getContextClassLoader().loadClass(managerClassName);
manager = (AbstractJBossManager) managerClass.newInstance();
String name = "//"
+ ((hostName == null) ? "localhost" : hostName)
+ ctxPath;
manager.init(name, metaData);
server.setAttribute(objectName, new Attribute("manager",
manager));
log.debug("Enabled clustering support for ctxPath=" + ctxPath);
} catch (ClusteringNotSupportedException e) {
// JBAS-3513 Just log a WARN, not an ERROR
log
.warn("Failed to setup clustering, clustering disabled. ClusteringNotSupportedException: "
+ e.getMessage());
} catch (NoClassDefFoundError ncdf) {
// JBAS-3513 Just log a WARN, not an ERROR
log.debug("Classes needed for clustered webapp unavailable",
ncdf);
log
.warn("Failed to setup clustering, clustering disabled. NoClassDefFoundError: "
+ ncdf.getMessage());
} catch (Throwable t) {
// TODO consider letting this through and fail the deployment
log
.error(
"Failed to setup clustering, clustering disabled. Exception: ",
t);
}
}
/*
* Add security association valve after the authorization valves so that
* the authenticated user may be associated with the request
* thread/session.
*/
SecurityAssociationValve valve = new SecurityAssociationValve(metaData,
config.getSecurityManagerService());
valve.setSubjectAttributeName(config.getSubjectAttributeName());
server.invoke(objectName, "addValve", new Object[] { valve },
new String[] { "org.apache.catalina.Valve" });
/*
* TODO: Retrieve the state, and throw an exception in case of a failure
* Integer state = (Integer) server.getAttribute(objectName, "state");
* if (state.intValue() != 1) { throw new DeploymentException("URL " +
* warUrl + " deployment failed"); }
*/
webApp.setAppData(objectName);
/*
* TODO: Create mbeans for the servlets ObjectName servletQuery = new
* ObjectName (config.getCatalinaDomain() +
* ":j2eeType=Servlet,WebModule=" + objectName.getKeyProperty("name") +
* ",*"); Iterator iterator = server.queryMBeans(servletQuery,
* null).iterator(); while (iterator.hasNext()) {
* di.mbeans.add(((ObjectInstance)iterator.next()).getObjectName()); }
*/
log.debug("Initialized: " + webApp + " " + objectName);
}
public class ConvergedEncListener extends EncListener
{
protected String hostName;
public ConvergedEncListener(String hostName, ClassLoader loader, Loader webLoader, WebApplication webApp) {
super(loader, webLoader, webApp);
this.hostName = hostName;
}
public void lifecycleEvent(LifecycleEvent event) {
super.lifecycleEvent(event);
if (event.getType().equals(StandardContext.AFTER_START_EVENT)) {
JBossConvergedSipMetaData convergedMetaData = (JBossConvergedSipMetaData) metaData ;
Thread currentThread = Thread.currentThread();
ClassLoader currentLoader = currentThread.getContextClassLoader();
currentThread.setContextClassLoader(webLoader.getClassLoader());
try {
InitialContext iniCtx = new InitialContext();
Context envCtx = (Context) iniCtx.lookup("java:comp/env");
Context sipSubcontext = envCtx.createSubcontext(SIP_SUBCONTEXT);
Context applicationNameSubcontext = sipSubcontext.createSubcontext(convergedMetaData.getApplicationName());
SipContext sipContext = (SipContext) event.getSource();
SipFactoryFacade sipFactoryFacade = (SipFactoryFacade) sipContext.getSipFactoryFacade();
TimerService timerService = (TimerService) sipContext.getTimerService();
SipSessionsUtil sipSessionsUtil = (SipSessionsUtil) sipContext.getSipSessionsUtil();
NonSerializableFactory.rebind(
applicationNameSubcontext,
SIP_FACTORY_JNDI_NAME,
sipFactoryFacade);
NonSerializableFactory
.rebind(
applicationNameSubcontext,
SIP_SESSIONS_UTIL_JNDI_NAME,
sipSessionsUtil);
NonSerializableFactory
.rebind(
applicationNameSubcontext,
TIMER_SERVICE_JNDI_NAME,
timerService);
if (log.isDebugEnabled()) {
log
.debug("Sip Objects made available to global JNDI under following conetxt : java:comp/env/sip/"
+ convergedMetaData.getApplicationName() + "/<ObjectName>");
}
}
catch (Throwable t) {
log.error("ENC setup failed", t);
throw new RuntimeException(t);
}
finally {
currentThread.setContextClassLoader(currentLoader);
}
}
}
}
}
| true | true | protected void performDeployInternal(String hostName,
WebApplication webApp, String warUrl) throws Exception {
JBossWebMetaData metaData = webApp.getMetaData();
String ctxPath = metaData.getContextRoot();
if (ctxPath.equals("/") || ctxPath.equals("/ROOT")
|| ctxPath.equals("")) {
log.debug("deploy root context=" + ctxPath);
ctxPath = "/";
metaData.setContextRoot(ctxPath);
}
log.info("deploy, ctxPath=" + ctxPath + ", vfsUrl="
+ webApp.getDeploymentUnit().getFile("").getPathName());
URL url = new URL(warUrl);
ClassLoader loader = Thread.currentThread().getContextClassLoader();
metaData.setContextLoader(loader);
StandardContext context = (StandardContext) Class.forName(
config.getContextClassName()).newInstance();
TomcatConvergedSipInjectionContainer injectionContainer = new TomcatConvergedSipInjectionContainer(
webApp, webApp.getDeploymentUnit(), context,
getPersistenceUnitDependencyResolver());
setInjectionContainer(injectionContainer);
Loader webLoader = webApp.getDeploymentUnit().getAttachment(
Loader.class);
if (webLoader == null)
webLoader = getWebLoader(webApp.getDeploymentUnit(), metaData,
loader, url);
webApp.setName(url.getPath());
webApp.setClassLoader(loader);
webApp.setURL(url);
String objectNameS = config.getCatalinaDomain()
+ ":j2eeType=WebModule,name=//"
+ ((hostName == null) ? "localhost" : hostName) + ctxPath
+ ",J2EEApplication=none,J2EEServer=none";
ObjectName objectName = new ObjectName(objectNameS);
if (Registry.getRegistry(null, null).getMBeanServer().isRegistered(
objectName))
throw new DeploymentException(
"Web mapping already exists for deployment URL " + warUrl);
Registry.getRegistry(null, null).registerComponent(context, objectName,
config.getContextClassName());
if (TomcatService.OLD_CODE) {
String ctxConfig = null;
File warFile = new File(url.getFile());
if (warFile.isDirectory() == false) {
// Using VFS access
VFSDirContext resources = new VFSDirContext();
resources
.setVirtualFile(webApp.getDeploymentUnit().getFile(""));
context.setResources(resources);
// Find META-INF/context.xml
VirtualFile file = webApp.getDeploymentUnit().getFile(
CONTEXT_CONFIG_FILE);
if (file != null) {
// Copy the META-INF/context.xml from the VFS to the temp
// folder
InputStream is = file.openStream();
FileOutputStream fos = null;
try {
byte[] buffer = new byte[512];
int bytes;
// FIXME: use JBoss' temp folder instead
File tempFile = File.createTempFile("context-", ".xml");
tempFile.deleteOnExit();
fos = new FileOutputStream(tempFile);
while ((bytes = is.read(buffer)) > 0) {
fos.write(buffer, 0, bytes);
}
ctxConfig = tempFile.getAbsolutePath();
} finally {
is.close();
if (fos != null) {
fos.close();
}
}
}
} else {
// Using direct filesystem access: no operation needed
// Find META-INF/context.xml
File webDD = new File(warFile, CONTEXT_CONFIG_FILE);
if (webDD.exists() == true) {
ctxConfig = webDD.getAbsolutePath();
}
}
context.setConfigFile(ctxConfig);
} else {
context.setConfigFile(CONTEXT_CONFIG_FILE);
}
context.setInstanceManager(injectionContainer);
context.setDocBase(url.getFile());
context.setDefaultContextXml("context.xml");
context.setDefaultWebXml("conf/web.xml");
context.setPublicId(metaData.getPublicID());
// If there is an alt-dd set it
if (metaData.getAlternativeDD() != null) {
log.debug("Setting altDDName to: " + metaData.getAlternativeDD());
context.setAltDDName(metaData.getAlternativeDD());
}
context.setJavaVMs(javaVMs);
context.setServer(serverName);
context.setSaveConfig(false);
if (webLoader != null) {
context.setLoader(webLoader);
} else {
context.setParentClassLoader(loader);
}
context.setDelegate(webApp.getJava2ClassLoadingCompliance());
// Javac compatibility whenever possible
String[] jspCP = getCompileClasspath(loader);
StringBuffer classpath = new StringBuffer();
for (int u = 0; u < jspCP.length; u++) {
String repository = jspCP[u];
if (repository == null)
continue;
if (repository.startsWith("file://"))
repository = repository.substring(7);
else if (repository.startsWith("file:"))
repository = repository.substring(5);
else
continue;
if (repository == null)
continue;
// OK, it is a file. Make sure that it is a directory or jar file
File fp = new File(repository);
if (!fp.isDirectory()) {
// if it is not a directory, try to open it as a zipfile.
try {
// avoid opening .xml files
if (fp.getName().toLowerCase().endsWith(".xml"))
continue;
ZipFile zip = new ZipFile(fp);
zip.close();
} catch (IOException e) {
continue;
}
}
if (u > 0)
classpath.append(File.pathSeparator);
classpath.append(repository);
}
context.setCompilerClasspath(classpath.toString());
// Set the session cookies flag according to metadata
switch (metaData.getSessionCookies()) {
case JBossWebMetaData.SESSION_COOKIES_ENABLED:
context.setCookies(true);
log.debug("Enabling session cookies");
break;
case JBossWebMetaData.SESSION_COOKIES_DISABLED:
context.setCookies(false);
log.debug("Disabling session cookies");
break;
default:
log.debug("Using session cookies default setting");
}
String metaDataSecurityDomain = metaData.getSecurityDomain();
if (metaDataSecurityDomain != null)
metaDataSecurityDomain = metaDataSecurityDomain.trim();
// TODO: add the security valve again with SecurityActions enabled. It was commented out due to an IllegalAccessError; this is a regression from regular JBoss 5
// Add a valve to establish security context
// SecurityContextEstablishmentValve scevalve = new SecurityContextEstablishmentValve(
// metaDataSecurityDomain, SecurityUtil
// .unprefixSecurityDomain(config
// .getDefaultSecurityDomain()), SecurityActions
// .loadClass(config.getSecurityContextClassName()),
// getSecurityManagement());
SecurityContextEstablishmentValve scevalve = new SecurityContextEstablishmentValve(
metaDataSecurityDomain, SecurityUtil
.unprefixSecurityDomain(config
.getDefaultSecurityDomain()), config.getSecurityContextClassName().getClass(),
getSecurityManagement());
context.addValve(scevalve);
// Add a valve to establish the JACC context before the authorization valves
Certificate[] certs = null;
CodeSource cs = new CodeSource(url, certs);
JaccContextValve jaccValve = new JaccContextValve(metaData, cs);
context.addValve(jaccValve);
// Set listener
context.setConfigClass("org.mobicents.servlet.sip.startup.jboss.SipJBossContextConfig");
context.addLifecycleListener(new ConvergedEncListener(hostName, loader, webLoader, webApp));
// Pass the metadata to the RunAsListener via a thread local
RunAsListener.metaDataLocal.set(metaData);
SipJBossContextConfig.metaDataLocal.set(metaData);
SipJBossContextConfig.metaDataShared.set(config.getSharedMetaData());
SipJBossContextConfig.deployerConfig.set(config);
SipJBossContextConfig.kernelLocal.set(kernel);
SipJBossContextConfig.deploymentUnitLocal.set(unit);
try {
// Start it
context.start();
// Build the ENC
} catch (Exception e) {
context.destroy();
DeploymentException.rethrowAsDeploymentException("URL " + warUrl
+ " deployment failed", e);
} finally {
RunAsListener.metaDataLocal.set(null);
SipJBossContextConfig.metaDataLocal.set(null);
SipJBossContextConfig.metaDataShared.set(null);
SipJBossContextConfig.deployerConfig.set(null);
SipJBossContextConfig.kernelLocal.set(null);
SipJBossContextConfig.deploymentUnitLocal.set(null);
}
if (context.getState() != 1) {
context.destroy();
throw new DeploymentException("URL " + warUrl
+ " deployment failed");
}
// Clustering
if (metaData.getDistributable() != null) {
// Try to initiate clustering, falling back to standard if no
// clustering is available
try {
AbstractJBossManager manager = null;
String managerClassName = config.getManagerClass();
Class managerClass = Thread.currentThread()
.getContextClassLoader().loadClass(managerClassName);
manager = (AbstractJBossManager) managerClass.newInstance();
String name = "//"
+ ((hostName == null) ? "localhost" : hostName)
+ ctxPath;
manager.init(name, metaData);
server.setAttribute(objectName, new Attribute("manager",
manager));
log.debug("Enabled clustering support for ctxPath=" + ctxPath);
} catch (ClusteringNotSupportedException e) {
// JBAS-3513 Just log a WARN, not an ERROR
log
.warn("Failed to setup clustering, clustering disabled. ClusteringNotSupportedException: "
+ e.getMessage());
} catch (NoClassDefFoundError ncdf) {
// JBAS-3513 Just log a WARN, not an ERROR
log.debug("Classes needed for clustered webapp unavailable",
ncdf);
log
.warn("Failed to setup clustering, clustering disabled. NoClassDefFoundError: "
+ ncdf.getMessage());
} catch (Throwable t) {
// TODO consider letting this through and fail the deployment
log
.error(
"Failed to setup clustering, clustering disabled. Exception: ",
t);
}
}
/*
* Add security association valve after the authorization valves so that
* the authenticated user may be associated with the request
* thread/session.
*/
SecurityAssociationValve valve = new SecurityAssociationValve(metaData,
config.getSecurityManagerService());
valve.setSubjectAttributeName(config.getSubjectAttributeName());
server.invoke(objectName, "addValve", new Object[] { valve },
new String[] { "org.apache.catalina.Valve" });
/*
* TODO: Retrieve the state, and throw an exception in case of a failure
* Integer state = (Integer) server.getAttribute(objectName, "state");
* if (state.intValue() != 1) { throw new DeploymentException("URL " +
* warUrl + " deployment failed"); }
*/
webApp.setAppData(objectName);
/*
* TODO: Create mbeans for the servlets ObjectName servletQuery = new
* ObjectName (config.getCatalinaDomain() +
* ":j2eeType=Servlet,WebModule=" + objectName.getKeyProperty("name") +
* ",*"); Iterator iterator = server.queryMBeans(servletQuery,
* null).iterator(); while (iterator.hasNext()) {
* di.mbeans.add(((ObjectInstance)iterator.next()).getObjectName()); }
*/
log.debug("Initialized: " + webApp + " " + objectName);
}
| protected void performDeployInternal(String hostName,
WebApplication webApp, String warUrl) throws Exception {
JBossWebMetaData metaData = webApp.getMetaData();
String ctxPath = metaData.getContextRoot();
if (ctxPath.equals("/") || ctxPath.equals("/ROOT")
|| ctxPath.equals("")) {
log.debug("deploy root context=" + ctxPath);
ctxPath = "/";
metaData.setContextRoot(ctxPath);
}
log.info("deploy, ctxPath=" + ctxPath + ", vfsUrl="
+ webApp.getDeploymentUnit().getFile("").getPathName());
URL url = new URL(warUrl);
ClassLoader loader = Thread.currentThread().getContextClassLoader();
metaData.setContextLoader(loader);
StandardContext context = (StandardContext) Class.forName(
config.getContextClassName()).newInstance();
TomcatConvergedSipInjectionContainer injectionContainer = new TomcatConvergedSipInjectionContainer(
webApp, webApp.getDeploymentUnit(), context,
getPersistenceUnitDependencyResolver());
setInjectionContainer(injectionContainer);
Loader webLoader = webApp.getDeploymentUnit().getAttachment(
Loader.class);
if (webLoader == null)
webLoader = getWebLoader(webApp.getDeploymentUnit(), metaData,
loader, url);
webApp.setName(url.getPath());
webApp.setClassLoader(loader);
webApp.setURL(url);
String objectNameS = config.getCatalinaDomain()
+ ":j2eeType=WebModule,name=//"
+ ((hostName == null) ? "localhost" : hostName) + ctxPath
+ ",J2EEApplication=none,J2EEServer=none";
ObjectName objectName = new ObjectName(objectNameS);
if (Registry.getRegistry(null, null).getMBeanServer().isRegistered(
objectName))
throw new DeploymentException(
"Web mapping already exists for deployment URL " + warUrl);
Registry.getRegistry(null, null).registerComponent(context, objectName,
config.getContextClassName());
if (TomcatService.OLD_CODE) {
String ctxConfig = null;
File warFile = new File(url.getFile());
if (warFile.isDirectory() == false) {
// Using VFS access
VFSDirContext resources = new VFSDirContext();
resources
.setVirtualFile(webApp.getDeploymentUnit().getFile(""));
context.setResources(resources);
// Find META-INF/context.xml
VirtualFile file = webApp.getDeploymentUnit().getFile(
CONTEXT_CONFIG_FILE);
if (file != null) {
// Copy the META-INF/context.xml from the VFS to the temp
// folder
InputStream is = file.openStream();
FileOutputStream fos = null;
try {
byte[] buffer = new byte[512];
int bytes;
// FIXME: use JBoss' temp folder instead
File tempFile = File.createTempFile("context-", ".xml");
tempFile.deleteOnExit();
fos = new FileOutputStream(tempFile);
while ((bytes = is.read(buffer)) > 0) {
fos.write(buffer, 0, bytes);
}
ctxConfig = tempFile.getAbsolutePath();
} finally {
is.close();
if (fos != null) {
fos.close();
}
}
}
} else {
// Using direct filesystem access: no operation needed
// Find META-INF/context.xml
File webDD = new File(warFile, CONTEXT_CONFIG_FILE);
if (webDD.exists() == true) {
ctxConfig = webDD.getAbsolutePath();
}
}
context.setConfigFile(ctxConfig);
} else {
context.setConfigFile(CONTEXT_CONFIG_FILE);
}
context.setInstanceManager(injectionContainer);
context.setDocBase(url.getFile());
context.setDefaultContextXml("context.xml");
context.setDefaultWebXml("conf/web.xml");
context.setPublicId(metaData.getPublicID());
// If there is an alt-dd set it
if (metaData.getAlternativeDD() != null) {
log.debug("Setting altDDName to: " + metaData.getAlternativeDD());
context.setAltDDName(metaData.getAlternativeDD());
}
context.setJavaVMs(javaVMs);
context.setServer(serverName);
context.setSaveConfig(false);
if (webLoader != null) {
context.setLoader(webLoader);
} else {
context.setParentClassLoader(loader);
}
context.setDelegate(webApp.getJava2ClassLoadingCompliance());
// Javac compatibility whenever possible
String[] jspCP = getCompileClasspath(loader);
StringBuffer classpath = new StringBuffer();
for (int u = 0; u < jspCP.length; u++) {
String repository = jspCP[u];
if (repository == null)
continue;
if (repository.startsWith("file://"))
repository = repository.substring(7);
else if (repository.startsWith("file:"))
repository = repository.substring(5);
else
continue;
if (repository == null)
continue;
// OK, it is a file. Make sure that it is a directory or jar file
File fp = new File(repository);
if (!fp.isDirectory()) {
// if it is not a directory, try to open it as a zipfile.
try {
// avoid opening .xml files
if (fp.getName().toLowerCase().endsWith(".xml"))
continue;
ZipFile zip = new ZipFile(fp);
zip.close();
} catch (IOException e) {
continue;
}
}
if (u > 0)
classpath.append(File.pathSeparator);
classpath.append(repository);
}
context.setCompilerClasspath(classpath.toString());
// Set the session cookies flag according to metadata
switch (metaData.getSessionCookies()) {
case JBossWebMetaData.SESSION_COOKIES_ENABLED:
context.setCookies(true);
log.debug("Enabling session cookies");
break;
case JBossWebMetaData.SESSION_COOKIES_DISABLED:
context.setCookies(false);
log.debug("Disabling session cookies");
break;
default:
log.debug("Using session cookies default setting");
}
String metaDataSecurityDomain = metaData.getSecurityDomain();
if (metaDataSecurityDomain != null)
metaDataSecurityDomain = metaDataSecurityDomain.trim();
// TODO: add the security valve again with SecurityActions enabled. It was commented out due to an IllegalAccessError; this is a regression from regular JBoss 5
// Add a valve to establish security context
// SecurityContextEstablishmentValve scevalve = new SecurityContextEstablishmentValve(
// metaDataSecurityDomain, SecurityUtil
// .unprefixSecurityDomain(config
// .getDefaultSecurityDomain()), SecurityActions
// .loadClass(config.getSecurityContextClassName()),
// getSecurityManagement());
SecurityContextEstablishmentValve scevalve = new SecurityContextEstablishmentValve(
metaDataSecurityDomain, SecurityUtil
.unprefixSecurityDomain(config
.getDefaultSecurityDomain()), Class.forName(config.getSecurityContextClassName()),
getSecurityManagement());
context.addValve(scevalve);
// Add a valve to establish the JACC context before the authorization valves
Certificate[] certs = null;
CodeSource cs = new CodeSource(url, certs);
JaccContextValve jaccValve = new JaccContextValve(metaData, cs);
context.addValve(jaccValve);
// Set listener
context.setConfigClass("org.mobicents.servlet.sip.startup.jboss.SipJBossContextConfig");
context.addLifecycleListener(new ConvergedEncListener(hostName, loader, webLoader, webApp));
// Pass the metadata to the RunAsListener via a thread local
RunAsListener.metaDataLocal.set(metaData);
SipJBossContextConfig.metaDataLocal.set(metaData);
SipJBossContextConfig.metaDataShared.set(config.getSharedMetaData());
SipJBossContextConfig.deployerConfig.set(config);
SipJBossContextConfig.kernelLocal.set(kernel);
SipJBossContextConfig.deploymentUnitLocal.set(unit);
try {
// Start it
context.start();
// Build the ENC
} catch (Exception e) {
context.destroy();
DeploymentException.rethrowAsDeploymentException("URL " + warUrl
+ " deployment failed", e);
} finally {
RunAsListener.metaDataLocal.set(null);
SipJBossContextConfig.metaDataLocal.set(null);
SipJBossContextConfig.metaDataShared.set(null);
SipJBossContextConfig.deployerConfig.set(null);
SipJBossContextConfig.kernelLocal.set(null);
SipJBossContextConfig.deploymentUnitLocal.set(null);
}
if (context.getState() != 1) {
context.destroy();
throw new DeploymentException("URL " + warUrl
+ " deployment failed");
}
// Clustering
if (metaData.getDistributable() != null) {
// Try to initiate clustering, falling back to standard if no
// clustering is available
try {
AbstractJBossManager manager = null;
String managerClassName = config.getManagerClass();
Class managerClass = Thread.currentThread()
.getContextClassLoader().loadClass(managerClassName);
manager = (AbstractJBossManager) managerClass.newInstance();
String name = "//"
+ ((hostName == null) ? "localhost" : hostName)
+ ctxPath;
manager.init(name, metaData);
server.setAttribute(objectName, new Attribute("manager",
manager));
log.debug("Enabled clustering support for ctxPath=" + ctxPath);
} catch (ClusteringNotSupportedException e) {
// JBAS-3513 Just log a WARN, not an ERROR
log
.warn("Failed to setup clustering, clustering disabled. ClusteringNotSupportedException: "
+ e.getMessage());
} catch (NoClassDefFoundError ncdf) {
// JBAS-3513 Just log a WARN, not an ERROR
log.debug("Classes needed for clustered webapp unavailable",
ncdf);
log
.warn("Failed to setup clustering, clustering disabled. NoClassDefFoundError: "
+ ncdf.getMessage());
} catch (Throwable t) {
// TODO consider letting this through and fail the deployment
log
.error(
"Failed to setup clustering, clustering disabled. Exception: ",
t);
}
}
/*
* Add security association valve after the authorization valves so that
* the authenticated user may be associated with the request
* thread/session.
*/
SecurityAssociationValve valve = new SecurityAssociationValve(metaData,
config.getSecurityManagerService());
valve.setSubjectAttributeName(config.getSubjectAttributeName());
server.invoke(objectName, "addValve", new Object[] { valve },
new String[] { "org.apache.catalina.Valve" });
/*
* TODO: Retrieve the state, and throw an exception in case of a failure
* Integer state = (Integer) server.getAttribute(objectName, "state");
* if (state.intValue() != 1) { throw new DeploymentException("URL " +
* warUrl + " deployment failed"); }
*/
webApp.setAppData(objectName);
/*
* TODO: Create mbeans for the servlets ObjectName servletQuery = new
* ObjectName (config.getCatalinaDomain() +
* ":j2eeType=Servlet,WebModule=" + objectName.getKeyProperty("name") +
* ",*"); Iterator iterator = server.queryMBeans(servletQuery,
* null).iterator(); while (iterator.hasNext()) {
* di.mbeans.add(((ObjectInstance)iterator.next()).getObjectName()); }
*/
log.debug("Initialized: " + webApp + " " + objectName);
}
|
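Editor's note on the buggy/fixed pair above: the only functional change is the third argument handed to SecurityContextEstablishmentValve. Calling getClass() on the configured class *name* returns String.class, not the named security-context class; the fix loads the class via Class.forName. A minimal, self-contained sketch of the pitfall (the class name used below is illustrative, not taken from the deployer config):

public class ClassLookupSketch {
    public static void main(String[] args) throws ClassNotFoundException {
        // Stand-in for config.getSecurityContextClassName()
        String securityContextClassName = "java.util.HashMap";

        // Buggy pattern: getClass() is invoked on the String object itself,
        // so the valve would receive String.class regardless of the config.
        Class<?> wrong = securityContextClassName.getClass();
        System.out.println(wrong); // prints: class java.lang.String

        // Fixed pattern: resolve the class the string actually names.
        Class<?> right = Class.forName(securityContextClassName);
        System.out.println(right); // prints: class java.util.HashMap
    }
}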
diff --git a/gerrit-server/src/main/java/com/google/gerrit/server/change/PostReview.java b/gerrit-server/src/main/java/com/google/gerrit/server/change/PostReview.java
index bc3c1614c..5011f31bf 100644
--- a/gerrit-server/src/main/java/com/google/gerrit/server/change/PostReview.java
+++ b/gerrit-server/src/main/java/com/google/gerrit/server/change/PostReview.java
@@ -1,438 +1,438 @@
// Copyright (C) 2012 The Android Open Source Project
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.gerrit.server.change;
import com.google.common.base.Objects;
import com.google.common.base.Strings;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.gerrit.common.ChangeHooks;
import com.google.gerrit.common.data.ApprovalType;
import com.google.gerrit.common.data.ApprovalTypes;
import com.google.gerrit.common.data.Permission;
import com.google.gerrit.common.data.PermissionRange;
import com.google.gerrit.extensions.restapi.AuthException;
import com.google.gerrit.extensions.restapi.BadRequestException;
import com.google.gerrit.extensions.restapi.DefaultInput;
import com.google.gerrit.extensions.restapi.RestModifyView;
import com.google.gerrit.reviewdb.client.ApprovalCategory;
import com.google.gerrit.reviewdb.client.ApprovalCategoryValue;
import com.google.gerrit.reviewdb.client.Change;
import com.google.gerrit.reviewdb.client.ChangeMessage;
import com.google.gerrit.reviewdb.client.Patch;
import com.google.gerrit.reviewdb.client.PatchLineComment;
import com.google.gerrit.reviewdb.client.PatchSetApproval;
import com.google.gerrit.reviewdb.server.ReviewDb;
import com.google.gerrit.server.ChangeUtil;
import com.google.gerrit.server.IdentifiedUser;
import com.google.gerrit.server.change.PostReview.Input;
import com.google.gerrit.server.project.ChangeControl;
import com.google.gwtorm.server.OrmException;
import com.google.inject.Inject;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.sql.Timestamp;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
class PostReview implements RestModifyView<RevisionResource, Input> {
private static final Logger log = LoggerFactory.getLogger(PostReview.class);
static class Input {
@DefaultInput
String message;
Map<String, Short> labels;
Map<String, List<Comment>> comments;
/**
* If true, require all labels to be within the user's permitted ranges
* based on access controls; attempting to use a label not granted to the
* user fails the entire modify operation early. If false, the operation
* executes anyway, but the labels proposed by the user are adjusted to
* the "best" value allowed by the access controls.
*/
boolean strictLabels = true;
/**
* How to process draft comments already in the database that were not also
* described in this input request.
*/
DraftHandling drafts = DraftHandling.DELETE;
}
static enum DraftHandling {
DELETE, PUBLISH, KEEP;
}
static class Comment {
String id;
GetDraft.Side side;
int line;
String message;
}
static class Output {
Map<String, Short> labels;
}
private final ReviewDb db;
private final ApprovalTypes approvalTypes;
private final EmailReviewComments.Factory email;
@Deprecated private final ChangeHooks hooks;
private Change change;
private ChangeMessage message;
private Timestamp timestamp;
private List<PatchLineComment> comments = Lists.newArrayList();
private List<String> labelDelta = Lists.newArrayList();
@Deprecated private Map<ApprovalCategory.Id, ApprovalCategoryValue.Id> categories
= Maps.newHashMap();
@Inject
PostReview(ReviewDb db,
ApprovalTypes approvalTypes,
EmailReviewComments.Factory email,
ChangeHooks hooks) {
this.db = db;
this.approvalTypes = approvalTypes;
this.email = email;
this.hooks = hooks;
}
@Override
public Class<Input> inputType() {
return Input.class;
}
@Override
public Object apply(RevisionResource revision, Input input)
throws AuthException, BadRequestException, OrmException {
if (input.labels != null) {
checkLabels(revision, input.strictLabels, input.labels);
}
if (input.comments != null) {
checkComments(input.comments);
}
db.changes().beginTransaction(revision.getChange().getId());
try {
change = db.changes().get(revision.getChange().getId());
ChangeUtil.updated(change);
timestamp = change.getLastUpdatedOn();
if (input.comments != null) {
insertComments(revision, input.comments, input.drafts);
}
if (change.getStatus().isOpen() && input.labels != null) {
// TODO Allow updating some labels even when closed.
updateLabels(revision, input.labels);
}
insertMessage(revision, input.message);
db.changes().update(Collections.singleton(change));
db.commit();
} finally {
db.rollback();
}
email.create(
change,
revision.getPatchSet(),
revision.getAuthorId(),
message,
comments).sendAsync();
fireCommentAddedHook(revision);
Output output = new Output();
output.labels = input.labels;
return output;
}
private void checkLabels(RevisionResource revision, boolean strict,
Map<String, Short> labels) throws BadRequestException, AuthException {
ChangeControl ctl = revision.getControl();
Iterator<Map.Entry<String, Short>> itr = labels.entrySet().iterator();
while (itr.hasNext()) {
Map.Entry<String, Short> ent = itr.next();
// TODO Support more generic label assignments.
ApprovalType at = approvalTypes.byLabel(ent.getKey());
if (at == null) {
if (strict) {
throw new BadRequestException(String.format(
"label \"%s\" is not a configured ApprovalCategory",
ent.getKey()));
} else {
itr.remove();
continue;
}
}
if (ent.getValue() == null || ent.getValue() == 0) {
// Always permit 0, even if it is not within range.
// Later null/0 will be deleted, revoking the label.
continue;
}
- if (!at.getValuesAsList().contains(ent.getValue())) {
+ if (at.getValue(ent.getValue()) == null) {
if (strict) {
throw new BadRequestException(String.format(
"label \"%s\": %d is not a valid value",
ent.getKey(), ent.getValue()));
} else {
itr.remove();
continue;
}
}
String name = at.getCategory().getLabelName();
PermissionRange range = ctl.getRange(Permission.forLabel(name));
if (range == null || !range.contains(ent.getValue())) {
if (strict) {
throw new AuthException(String.format(
"Applying label \"%s\": %d is restricted",
ent.getKey(), ent.getValue()));
} else if (range == null || range.isEmpty()) {
ent.setValue((short) 0);
} else {
ent.setValue((short) range.squash(ent.getValue()));
}
}
}
}
private void checkComments(Map<String, List<Comment>> in)
throws BadRequestException {
Iterator<Map.Entry<String, List<Comment>>> mapItr =
in.entrySet().iterator();
while (mapItr.hasNext()) {
Map.Entry<String, List<Comment>> ent = mapItr.next();
String path = ent.getKey();
List<Comment> list = ent.getValue();
if (list == null) {
mapItr.remove();
continue;
}
Iterator<Comment> listItr = list.iterator();
while (listItr.hasNext()) {
Comment c = listItr.next();
if (c.line < 0) {
throw new BadRequestException(String.format(
"negative line number %d not allowed on %s",
c.line, path));
}
c.message = Strings.nullToEmpty(c.message).trim();
if (c.message.isEmpty()) {
listItr.remove();
}
}
if (list.isEmpty()) {
mapItr.remove();
}
}
}
private void insertComments(RevisionResource rsrc,
Map<String, List<Comment>> in, DraftHandling draftsHandling)
throws OrmException {
Map<String, PatchLineComment> drafts = scanDraftComments(rsrc);
List<PatchLineComment> del = Lists.newArrayList();
List<PatchLineComment> ins = Lists.newArrayList();
List<PatchLineComment> upd = Lists.newArrayList();
for (Map.Entry<String, List<Comment>> ent : in.entrySet()) {
String path = ent.getKey();
for (Comment c : ent.getValue()) {
PatchLineComment e = drafts.remove(c.id);
boolean create = e == null;
if (create) {
e = new PatchLineComment(
new PatchLineComment.Key(
new Patch.Key(rsrc.getPatchSet().getId(), path),
ChangeUtil.messageUUID(db)),
c.line,
rsrc.getAuthorId(),
null);
}
e.setStatus(PatchLineComment.Status.PUBLISHED);
e.setWrittenOn(timestamp);
e.setSide(c.side == GetDraft.Side.PARENT ? (short) 0 : (short) 1);
e.setMessage(c.message);
(create ? ins : upd).add(e);
}
}
switch (Objects.firstNonNull(draftsHandling, DraftHandling.DELETE)) {
case KEEP:
default:
break;
case DELETE:
del.addAll(drafts.values());
break;
case PUBLISH:
for (PatchLineComment e : drafts.values()) {
e.setStatus(PatchLineComment.Status.PUBLISHED);
e.setWrittenOn(timestamp);
upd.add(e);
}
break;
}
db.patchComments().delete(del);
db.patchComments().insert(ins);
db.patchComments().update(upd);
comments.addAll(ins);
comments.addAll(upd);
}
private Map<String, PatchLineComment> scanDraftComments(
RevisionResource rsrc) throws OrmException {
Map<String, PatchLineComment> drafts = Maps.newHashMap();
for (PatchLineComment c : db.patchComments().draftByPatchSetAuthor(
rsrc.getPatchSet().getId(),
rsrc.getAuthorId())) {
drafts.put(c.getKey().get(), c);
}
return drafts;
}
private void updateLabels(RevisionResource rsrc, Map<String, Short> labels)
throws OrmException {
List<PatchSetApproval> del = Lists.newArrayList();
List<PatchSetApproval> ins = Lists.newArrayList();
List<PatchSetApproval> upd = Lists.newArrayList();
Map<String, PatchSetApproval> current = scanLabels(rsrc, del);
for (Map.Entry<String, Short> ent : labels.entrySet()) {
// TODO Support arbitrary label names.
ApprovalType at = approvalTypes.byLabel(ent.getKey());
String name = at.getCategory().getLabelName();
PatchSetApproval c = current.get(name);
if (ent.getValue() == null || ent.getValue() == 0) {
// User requested delete of this label.
if (c != null) {
del.add(c);
labelDelta.add("-" + name);
}
continue;
}
if (c != null && c.getValue() != ent.getValue()) {
c.setValue(ent.getValue());
c.setGranted(timestamp);
c.cache(change);
upd.add(c);
labelDelta.add(format(name, c.getValue()));
categories.put(
at.getCategory().getId(),
at.getValue(c.getValue()).getId());
} else if (c == null) {
c = new PatchSetApproval(new PatchSetApproval.Key(
rsrc.getPatchSet().getId(),
rsrc.getAuthorId(),
at.getCategory().getId()),
ent.getValue());
c.setGranted(timestamp);
c.cache(change);
ins.add(c);
labelDelta.add(format(name, c.getValue()));
categories.put(
at.getCategory().getId(),
at.getValue(c.getValue()).getId());
}
}
db.patchSetApprovals().delete(del);
db.patchSetApprovals().insert(ins);
db.patchSetApprovals().update(upd);
}
private Map<String, PatchSetApproval> scanLabels(RevisionResource rsrc,
List<PatchSetApproval> del) throws OrmException {
Map<String, PatchSetApproval> current = Maps.newHashMap();
for (PatchSetApproval a : db.patchSetApprovals().byPatchSetUser(
rsrc.getPatchSet().getId(), rsrc.getAuthorId())) {
if (ApprovalCategory.SUBMIT.equals(a.getCategoryId())) {
continue;
}
ApprovalType at = approvalTypes.byId(a.getCategoryId());
if (at != null) {
current.put(at.getCategory().getLabelName(), a);
} else {
del.add(a);
}
}
return current;
}
private static String format(String name, short value) {
StringBuilder sb = new StringBuilder(name.length() + 2);
sb.append(name);
if (value >= 0) {
sb.append('+');
}
sb.append(value);
return sb.toString();
}
private void insertMessage(RevisionResource rsrc, String msg)
throws OrmException {
msg = Strings.nullToEmpty(msg).trim();
StringBuilder buf = new StringBuilder();
buf.append(String.format(
"Patch Set %d:",
rsrc.getPatchSet().getPatchSetId()));
for (String d : labelDelta) {
buf.append(" ").append(d);
}
if (comments.size() == 1) {
buf.append("\n\n(1 inline comment)");
} else if (comments.size() > 1) {
buf.append(String.format("\n\n(%d inline comments)", comments.size()));
}
if (!msg.isEmpty()) {
buf.append("\n\n").append(msg);
}
message = new ChangeMessage(
new ChangeMessage.Key(change.getId(), ChangeUtil.messageUUID(db)),
rsrc.getAuthorId(),
timestamp,
rsrc.getPatchSet().getId());
message.setMessage(buf.toString());
db.changeMessages().insert(Collections.singleton(message));
}
@Deprecated
private void fireCommentAddedHook(RevisionResource rsrc) {
IdentifiedUser user = (IdentifiedUser) rsrc.getControl().getCurrentUser();
try {
hooks.doCommentAddedHook(change,
user.getAccount(),
rsrc.getPatchSet(),
message.getMessage(),
categories, db);
} catch (OrmException e) {
log.warn("ChangeHook.doCommentAddedHook delivery failed", e);
}
}
}
| true | true | private void checkLabels(RevisionResource revision, boolean strict,
Map<String, Short> labels) throws BadRequestException, AuthException {
ChangeControl ctl = revision.getControl();
Iterator<Map.Entry<String, Short>> itr = labels.entrySet().iterator();
while (itr.hasNext()) {
Map.Entry<String, Short> ent = itr.next();
// TODO Support more generic label assignments.
ApprovalType at = approvalTypes.byLabel(ent.getKey());
if (at == null) {
if (strict) {
throw new BadRequestException(String.format(
"label \"%s\" is not a configured ApprovalCategory",
ent.getKey()));
} else {
itr.remove();
continue;
}
}
if (ent.getValue() == null || ent.getValue() == 0) {
// Always permit 0, even if it is not within range.
// Later null/0 will be deleted, revoking the label.
continue;
}
if (!at.getValuesAsList().contains(ent.getValue())) {
if (strict) {
throw new BadRequestException(String.format(
"label \"%s\": %d is not a valid value",
ent.getKey(), ent.getValue()));
} else {
itr.remove();
continue;
}
}
String name = at.getCategory().getLabelName();
PermissionRange range = ctl.getRange(Permission.forLabel(name));
if (range == null || !range.contains(ent.getValue())) {
if (strict) {
throw new AuthException(String.format(
"Applying label \"%s\": %d is restricted",
ent.getKey(), ent.getValue()));
} else if (range == null || range.isEmpty()) {
ent.setValue((short) 0);
} else {
ent.setValue((short) range.squash(ent.getValue()));
}
}
}
}
| private void checkLabels(RevisionResource revision, boolean strict,
Map<String, Short> labels) throws BadRequestException, AuthException {
ChangeControl ctl = revision.getControl();
Iterator<Map.Entry<String, Short>> itr = labels.entrySet().iterator();
while (itr.hasNext()) {
Map.Entry<String, Short> ent = itr.next();
// TODO Support more generic label assignments.
ApprovalType at = approvalTypes.byLabel(ent.getKey());
if (at == null) {
if (strict) {
throw new BadRequestException(String.format(
"label \"%s\" is not a configured ApprovalCategory",
ent.getKey()));
} else {
itr.remove();
continue;
}
}
if (ent.getValue() == null || ent.getValue() == 0) {
// Always permit 0, even if it is not within range.
// Later null/0 will be deleted, revoking the label.
continue;
}
if (at.getValue(ent.getValue()) == null) {
if (strict) {
throw new BadRequestException(String.format(
"label \"%s\": %d is not a valid value",
ent.getKey(), ent.getValue()));
} else {
itr.remove();
continue;
}
}
String name = at.getCategory().getLabelName();
PermissionRange range = ctl.getRange(Permission.forLabel(name));
if (range == null || !range.contains(ent.getValue())) {
if (strict) {
throw new AuthException(String.format(
"Applying label \"%s\": %d is restricted",
ent.getKey(), ent.getValue()));
} else if (range == null || range.isEmpty()) {
ent.setValue((short) 0);
} else {
ent.setValue((short) range.squash(ent.getValue()));
}
}
}
}
|
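Editor's note on the checkLabels pair above: the buggy line asks a List of ApprovalCategoryValue objects whether it contains a Short. List.contains accepts any Object, so this compiles but can never be true, which made every label value look invalid; the fix queries the value through at.getValue(...) instead. A self-contained sketch of the pitfall, using a hypothetical LabelValue stand-in for ApprovalCategoryValue:

import java.util.Arrays;
import java.util.List;

public class ContainsPitfallSketch {
    static final class LabelValue {
        final short value;
        LabelValue(short value) { this.value = value; }
    }

    public static void main(String[] args) {
        List<LabelValue> values =
                Arrays.asList(new LabelValue((short) -1), new LabelValue((short) 1));
        Short proposed = 1;

        // Compiles because List.contains takes Object, but a Short can
        // never equal a LabelValue, so the check is always false.
        System.out.println(values.contains(proposed)); // prints: false

        // Look the value up by its numeric content instead, which is what
        // the fixed code does via at.getValue(ent.getValue()).
        boolean valid = values.stream().anyMatch(v -> v.value == proposed);
        System.out.println(valid); // prints: true
    }
}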
diff --git a/src/DistGrep/ConnectionHandler.java b/src/DistGrep/ConnectionHandler.java
index f8fb630..88de458 100644
--- a/src/DistGrep/ConnectionHandler.java
+++ b/src/DistGrep/ConnectionHandler.java
@@ -1,219 +1,225 @@
package DistGrep;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.net.*;
import java.util.Enumeration;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.TimeUnit;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
* Created with IntelliJ IDEA.
* User: kyle
* Date: 9/14/13
* Time: 8:58 PM
* To change this template use File | Settings | File Templates.
*/
public class ConnectionHandler implements Runnable {
private LinkedBlockingQueue<Socket> connectionQueue;
private String searchPath;
private boolean shouldRun = true;
private Config conf;
public ConnectionHandler(LinkedBlockingQueue<Socket> connectionQueue, Config conf) {
this.connectionQueue = connectionQueue;
this.conf = conf;
this.searchPath = conf.valueFor("searchPath");
}
public void kill() {
this.shouldRun = false;
}
public void run() {
System.out.println("[" + this.getClass().toString() + "]: Waiting to handle accepted connections: Started");
while(shouldRun) {
// Poll the connection queue for an accepted connection.
Socket clientSocket = null;
InetAddress clientAddress = null;
try {
clientSocket = connectionQueue.poll(1, TimeUnit.MINUTES);
}
catch (InterruptedException e) {
break;
}
//If we timed out or our thread was interrupted, continue.
if(clientSocket == null)
continue;
clientAddress = clientSocket.getInetAddress();
System.out.println("[" + this.getClass().toString() + "]: Got connection from: " + clientAddress);
String clientMessage;
//Attempt to get the message from the client.
try {
clientMessage = readStringFromConnection(clientSocket);
//clientSocket.close();
}
catch (IOException e) {
System.err.println("Failed to get message from client. " + e);
continue;
}
System.out.println("[" + this.getClass().toString() + "]: Got message from: " + clientAddress);
- String[] parsedMessage = parseMessage(clientMessage);
+ String[] parsedMessage;
+ try {
+ parsedMessage = parseMessage(clientMessage);
+ }
+ catch (IllegalStateException e) {
+ continue;
+ }
String header = parsedMessage[0];
String body = parsedMessage[1];
//If a request was sent to this machine, it executes a grep and sends the results back to the initiator
// if(header.equalsIgnoreCase("searchrequest")) {
System.out.println("[" + this.getClass().toString() + "]: Running search for: " + clientAddress);
CommandExecutor grepExecutor = null;
try {
grepExecutor = Search.runSearch(searchPath, body);
}
catch (IOException e) {
System.err.println("Failed to generate search results. " + e);
}
catch (InterruptedException e) {
break;
}
try {
System.out.println("[" + this.getClass().toString() + "]: Delivering results to: " + clientAddress);
deliverResults(clientSocket, grepExecutor);
clientSocket.close();
}
catch (SocketException e) {
System.err.println("[" + this.getClass().toString() + "]: Failed to enumerate network devices. " + e);
continue;
}
catch (IOException e) {
System.err.println("[" + this.getClass().toString() + "]: Failed to deliver results to client. " + e);
continue;
}
/*
} else if(header.equalsIgnoreCase("searchresult")) {
body = body.replace("<br>", "\n");
System.out.println("Search results from " + clientAddress + ":\n----------\n" + body);
}
*/
}
System.out.println("[" + this.getClass().toString() + "] is dying.");
}
//Reads a string message from a client.
private String readStringFromConnection(Socket clientSocket) throws IOException {
String clientMessage = null;
BufferedReader bufferedReader = new BufferedReader(new InputStreamReader(clientSocket.getInputStream()));
char[] buffer = new char[3000];
int numberOfChars = bufferedReader.read(buffer, 0, 3000);
clientMessage = new String(buffer, 0, numberOfChars);
return clientMessage;
}
// Parse the received XML-message and extract header and body information
// Returns a string array of size 2. The first member is the header,
// The second is the body.
private String[] parseMessage(String clientMessage) throws NullPointerException, IllegalStateException {
String header = null;
String body = null;
clientMessage = clientMessage.replace("\n", "<br>");
final Pattern headerpattern = Pattern.compile("<header>(.+?)</header>");
final Matcher headermatcher = headerpattern.matcher(clientMessage);
headermatcher.find();
header = headermatcher.group(1);
final Pattern bodypattern = Pattern.compile("<body>(.+?)</body>");
final Matcher bodymatcher = bodypattern.matcher(clientMessage);
bodymatcher.find();
body = bodymatcher.group(1);
String[] parsedMessage = new String[2];
parsedMessage[0] = header;
parsedMessage[1] = body;
return parsedMessage;
}
private void deliverResults(Socket clientSocket, CommandExecutor grepExecutor) throws SocketException, IOException {
OutputStream clientOutputStream = clientSocket.getOutputStream();
BufferedReader processOutput = grepExecutor.getProcessReader();
String line;
while(!grepExecutor.processIsTerminated()) {
line = processOutput.readLine();
if(line != null)
clientOutputStream.write((line + "\n").getBytes());
}
while((line = processOutput.readLine()) != null) {
clientOutputStream.write((line + "\n").getBytes());
}
/*
while ((line = b.readLine()) != null) {
text += line + "\n";
System.out.println(text.length());
System.out.println(p.exitValue());
p.
}
clientSocket.getOutputStream().write(searchResults.getBytes());
/*
if(isLocalInetAddress(clientSocket)) {
System.out.println("Search results from localhost:\n"+searchResults+"---\n");
} else {
Connection backcon = new Connection(conf);
String[] receiver = new String[1];
receiver[0] = clientSocket.toString().substring(1);
backcon.sendMessage(searchResults, "searchresult", receiver);
}
*/
}
//Check, if an address is local.
private boolean isLocalInetAddress(InetAddress addr) throws SocketException {
Enumeration<NetworkInterface> n = NetworkInterface.getNetworkInterfaces();
while(n.hasMoreElements())
{
NetworkInterface e =(NetworkInterface) n.nextElement();
Enumeration ee = e.getInetAddresses();
while(ee.hasMoreElements())
{
InetAddress i= (InetAddress) ee.nextElement();
if(addr.toString().substring(1).equalsIgnoreCase(i.getHostAddress().toString())) {
return true;
}
}
}
return false;
}
}
| true | true | public void run() {
System.out.println("[" + this.getClass().toString() + "]: Waiting to handle accepted connections: Started");
while(shouldRun) {
// Poll the connection queue for an accepted connection.
Socket clientSocket = null;
InetAddress clientAddress = null;
try {
clientSocket = connectionQueue.poll(1, TimeUnit.MINUTES);
}
catch (InterruptedException e) {
break;
}
//If we timed out or our thread was interrupted, continue.
if(clientSocket == null)
continue;
clientAddress = clientSocket.getInetAddress();
System.out.println("[" + this.getClass().toString() + "]: Got connection from: " + clientAddress);
String clientMessage;
//Attempt to get the message from the client.
try {
clientMessage = readStringFromConnection(clientSocket);
//clientSocket.close();
}
catch (IOException e) {
System.err.println("Failed to get message from client. " + e);
continue;
}
System.out.println("[" + this.getClass().toString() + "]: Got message from: " + clientAddress);
String[] parsedMessage = parseMessage(clientMessage);
String header = parsedMessage[0];
String body = parsedMessage[1];
//If a request was sent to this machine, it executes a grep and sends the results back to the initiator
// if(header.equalsIgnoreCase("searchrequest")) {
System.out.println("[" + this.getClass().toString() + "]: Running search for: " + clientAddress);
CommandExecutor grepExecutor = null;
try {
grepExecutor = Search.runSearch(searchPath, body);
}
catch (IOException e) {
System.err.println("Failed to generate search results. " + e);
}
catch (InterruptedException e) {
break;
}
try {
System.out.println("[" + this.getClass().toString() + "]: Delivering results to: " + clientAddress);
deliverResults(clientSocket, grepExecutor);
clientSocket.close();
}
catch (SocketException e) {
System.err.println("[" + this.getClass().toString() + "]: Failed to enumerate network devices. " + e);
continue;
}
catch (IOException e) {
System.err.println("[" + this.getClass().toString() + "]: Failed to deliver results to client. " + e);
continue;
}
/*
} else if(header.equalsIgnoreCase("searchresult")) {
body = body.replace("<br>", "\n");
System.out.println("Search results from " + clientAddress + ":\n----------\n" + body);
}
*/
}
| public void run() {
System.out.println("[" + this.getClass().toString() + "]: Waiting to handle accepted connections: Started");
while(shouldRun) {
// Poll the connection queue for an accepted connection.
Socket clientSocket = null;
InetAddress clientAddress = null;
try {
clientSocket = connectionQueue.poll(1, TimeUnit.MINUTES);
}
catch (InterruptedException e) {
break;
}
//If we timed out or our thread was interrupted, continue.
if(clientSocket == null)
continue;
clientAddress = clientSocket.getInetAddress();
System.out.println("[" + this.getClass().toString() + "]: Got connection from: " + clientAddress);
String clientMessage;
//Attempt to get the message from the client.
try {
clientMessage = readStringFromConnection(clientSocket);
//clientSocket.close();
}
catch (IOException e) {
System.err.println("Failed to get message from client. " + e);
continue;
}
System.out.println("[" + this.getClass().toString() + "]: Got message from: " + clientAddress);
String[] parsedMessage;
try {
parsedMessage = parseMessage(clientMessage);
}
catch (IllegalStateException e) {
continue;
}
String header = parsedMessage[0];
String body = parsedMessage[1];
//If a request was sent to this machine, it executes a grep and sends the results back to the initiator
// if(header.equalsIgnoreCase("searchrequest")) {
System.out.println("[" + this.getClass().toString() + "]: Running search for: " + clientAddress);
CommandExecutor grepExecutor = null;
try {
grepExecutor = Search.runSearch(searchPath, body);
}
catch (IOException e) {
System.err.println("Failed to generate search results. " + e);
}
catch (InterruptedException e) {
break;
}
try {
System.out.println("[" + this.getClass().toString() + "]: Delivering results to: " + clientAddress);
deliverResults(clientSocket, grepExecutor);
clientSocket.close();
}
catch (SocketException e) {
System.err.println("[" + this.getClass().toString() + "]: Failed to enumerate network devices. " + e);
continue;
}
catch (IOException e) {
System.err.println("[" + this.getClass().toString() + "]: Failed to deliver results to client. " + e);
continue;
}
/*
} else if(header.equalsIgnoreCase("searchresult")) {
body = body.replace("<br>", "\n");
System.out.println("Search results from " + clientAddress + ":\n----------\n" + body);
}
*/
}
|
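Editor's note on the ConnectionHandler pair above: parseMessage discards the boolean returned by Matcher.find(), so a message lacking the expected <header>/<body> tags makes group(1) throw IllegalStateException and kills the handler thread; the fixed run() catches that exception and skips the malformed message. A minimal reproduction of the failure mode:

import java.util.regex.Matcher;
import java.util.regex.Pattern;

public class MatcherStateSketch {
    public static void main(String[] args) {
        Pattern headerPattern = Pattern.compile("<header>(.+?)</header>");
        Matcher m = headerPattern.matcher("not an xml message");

        try {
            m.find();                   // returns false; result ignored, as in parseMessage
            String header = m.group(1); // throws IllegalStateException: No match found
            System.out.println(header);
        } catch (IllegalStateException e) {
            // The fixed run() continues to the next connection instead of dying.
            System.out.println("malformed message skipped: " + e.getMessage());
        }
    }
}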
diff --git a/src/main/java/com/fasterxml/jackson/databind/deser/BeanDeserializerFactory.java b/src/main/java/com/fasterxml/jackson/databind/deser/BeanDeserializerFactory.java
index aa6705418..1670b474d 100644
--- a/src/main/java/com/fasterxml/jackson/databind/deser/BeanDeserializerFactory.java
+++ b/src/main/java/com/fasterxml/jackson/databind/deser/BeanDeserializerFactory.java
@@ -1,846 +1,848 @@
package com.fasterxml.jackson.databind.deser;
import java.lang.reflect.Type;
import java.util.*;
import java.util.concurrent.atomic.AtomicReference;
import com.fasterxml.jackson.annotation.ObjectIdGenerator;
import com.fasterxml.jackson.annotation.ObjectIdGenerators;
import com.fasterxml.jackson.databind.*;
import com.fasterxml.jackson.databind.annotation.JsonPOJOBuilder;
import com.fasterxml.jackson.databind.cfg.DeserializerFactoryConfig;
import com.fasterxml.jackson.databind.deser.impl.FieldProperty;
import com.fasterxml.jackson.databind.deser.impl.MethodProperty;
import com.fasterxml.jackson.databind.deser.impl.ObjectIdReader;
import com.fasterxml.jackson.databind.deser.impl.PropertyBasedObjectIdGenerator;
import com.fasterxml.jackson.databind.deser.impl.SetterlessProperty;
import com.fasterxml.jackson.databind.deser.std.JdkDeserializers;
import com.fasterxml.jackson.databind.deser.std.ThrowableDeserializer;
import com.fasterxml.jackson.databind.introspect.*;
import com.fasterxml.jackson.databind.jsontype.TypeDeserializer;
import com.fasterxml.jackson.databind.type.ClassKey;
import com.fasterxml.jackson.databind.type.TypeFactory;
import com.fasterxml.jackson.databind.util.ArrayBuilders;
import com.fasterxml.jackson.databind.util.ClassUtil;
import com.fasterxml.jackson.databind.util.SimpleBeanPropertyDefinition;
/**
* Concrete deserializer factory class that adds full Bean deserializer
* construction logic using class introspection.
* Note that factories specifically do not implement any form of caching:
* aside from configuration they are stateless; caching is implemented
* by other components.
*<p>
* Instances of this class are fully immutable as all configuration is
* done by using "fluent factories" (methods that construct new factory
* instances with different configuration, instead of modifying the instance).
*/
public class BeanDeserializerFactory
extends BasicDeserializerFactory
implements java.io.Serializable // since 2.1
{
private static final long serialVersionUID = 1;
/**
* Signature of <b>Throwable.initCause</b> method.
*/
private final static Class<?>[] INIT_CAUSE_PARAMS = new Class<?>[] { Throwable.class };
private final static Class<?>[] NO_VIEWS = new Class<?>[0];
/*
/**********************************************************
/* Life-cycle
/**********************************************************
*/
/**
* Globally shareable thread-safe instance which has no additional custom deserializers
* registered
*/
public final static BeanDeserializerFactory instance = new BeanDeserializerFactory(
new DeserializerFactoryConfig());
public BeanDeserializerFactory(DeserializerFactoryConfig config) {
super(config);
}
/**
* Method used by module registration functionality, to construct a new bean
* deserializer factory
* with different configuration settings.
*/
@Override
public DeserializerFactory withConfig(DeserializerFactoryConfig config)
{
if (_factoryConfig == config) {
return this;
}
/* 22-Nov-2010, tatu: Handling of subtypes is tricky if we do immutable-with-copy-ctor;
* and we pretty much have to here either choose between losing subtype instance
* when registering additional deserializers, or losing deserializers.
* Instead, let's actually just throw an error if this method is called when subtype
* has not properly overridden this method; this to indicate problem as soon as possible.
*/
if (getClass() != BeanDeserializerFactory.class) {
throw new IllegalStateException("Subtype of BeanDeserializerFactory ("+getClass().getName()
+") has not properly overridden method 'withAdditionalDeserializers': can not instantiate subtype with "
+"additional deserializer definitions");
}
return new BeanDeserializerFactory(config);
}
/*
/**********************************************************
/* Overrides for super-class methods used for finding
/* custom deserializers
/**********************************************************
*/
// Note: NOT overriding, superclass has no matching method
@SuppressWarnings("unchecked")
protected JsonDeserializer<Object> _findCustomBeanDeserializer(JavaType type,
DeserializationConfig config, BeanDescription beanDesc)
throws JsonMappingException
{
for (Deserializers d : _factoryConfig.deserializers()) {
JsonDeserializer<?> deser = d.findBeanDeserializer(type, config, beanDesc);
if (deser != null) {
return (JsonDeserializer<Object>) deser;
}
}
return null;
}
/*
/**********************************************************
/* DeserializerFactory API implementation
/**********************************************************
*/
/**
* Method that {@link DeserializerCache}s call to create a new
* deserializer for types other than Collections, Maps, arrays and
* enums.
*/
@Override
public JsonDeserializer<Object> createBeanDeserializer(DeserializationContext ctxt,
JavaType type, BeanDescription beanDesc)
throws JsonMappingException
{
final DeserializationConfig config = ctxt.getConfig();
// We may also have custom overrides:
JsonDeserializer<Object> custom = _findCustomBeanDeserializer(type, config, beanDesc);
if (custom != null) {
return custom;
}
/* One more thing to check: do we have an exception type
* (Throwable or its sub-classes)? If so, need slightly
* different handling.
*/
if (type.isThrowable()) {
return buildThrowableDeserializer(ctxt, type, beanDesc);
}
/* Or, for abstract types, may have alternate means for resolution
* (defaulting, materialization)
*/
if (type.isAbstract()) {
// [JACKSON-41] (v1.6): Let's make it possible to materialize abstract types.
JavaType concreteType = materializeAbstractType(config, beanDesc);
if (concreteType != null) {
/* important: introspect actual implementation (abstract class or
* interface doesn't have constructors, for one)
*/
beanDesc = config.introspect(concreteType);
return buildBeanDeserializer(ctxt, concreteType, beanDesc);
}
}
// Otherwise, may want to check handlers for standard types, from superclass:
JsonDeserializer<Object> deser = findStdDeserializer(config, type);
if (deser != null) {
return deser;
}
// Otherwise: could the class be a Bean class? If not, bail out
if (!isPotentialBeanType(type.getRawClass())) {
return null;
}
// Use generic bean introspection to build deserializer
return buildBeanDeserializer(ctxt, type, beanDesc);
}
@Override
public JsonDeserializer<Object> createBuilderBasedDeserializer(
DeserializationContext ctxt, JavaType valueType, BeanDescription beanDesc,
Class<?> builderClass)
throws JsonMappingException
{
// First: need a BeanDescription for builder class
JavaType builderType = ctxt.constructType(builderClass);
BeanDescription builderDesc = ctxt.getConfig().introspectForBuilder(builderType);
return buildBuilderBasedDeserializer(ctxt, valueType, builderDesc);
}
/**
* Method called by {@link BeanDeserializerFactory} to see if there might be a standard
* deserializer registered for given type.
*/
@SuppressWarnings("unchecked")
protected JsonDeserializer<Object> findStdDeserializer(DeserializationConfig config,
JavaType type)
throws JsonMappingException
{
Class<?> cls = type.getRawClass();
// note: we do NOT check for custom deserializers here; that's for sub-class to do
JsonDeserializer<Object> deser = _simpleDeserializers.get(new ClassKey(cls));
if (deser != null) {
return deser;
}
// [JACKSON-283]: AtomicReference is a rather special type...
if (AtomicReference.class.isAssignableFrom(cls)) {
// Must find parameterization
TypeFactory tf = config.getTypeFactory();
JavaType[] params = tf.findTypeParameters(type, AtomicReference.class);
JavaType referencedType;
if (params == null || params.length < 1) { // untyped (raw)
referencedType = TypeFactory.unknownType();
} else {
referencedType = params[0];
}
JsonDeserializer<?> d2 = new JdkDeserializers.AtomicReferenceDeserializer(referencedType);
return (JsonDeserializer<Object>)d2;
}
// [JACKSON-386]: External/optional type handlers are handled somewhat differently
JsonDeserializer<?> d = optionalHandlers.findDeserializer(type, config);
if (d != null) {
return (JsonDeserializer<Object>)d;
}
return null;
}
protected JavaType materializeAbstractType(DeserializationConfig config,
BeanDescription beanDesc)
throws JsonMappingException
{
final JavaType abstractType = beanDesc.getType();
/* [JACKSON-502] (1.8): Now it is possible to have multiple resolvers too,
* as they are registered via module interface.
*/
for (AbstractTypeResolver r : _factoryConfig.abstractTypeResolvers()) {
JavaType concrete = r.resolveAbstractType(config, abstractType);
if (concrete != null) {
return concrete;
}
}
return null;
}
/*
/**********************************************************
/* Public construction method beyond DeserializerFactory API:
/* can be called from outside as well as overridden by
/* sub-classes
/**********************************************************
*/
/**
* Method that is to actually build a bean deserializer instance.
* All basic sanity checks have been done to know that what we have
* may be a valid bean type, and that there are no default simple
* deserializers.
*/
@SuppressWarnings("unchecked")
public JsonDeserializer<Object> buildBeanDeserializer(DeserializationContext ctxt,
JavaType type, BeanDescription beanDesc)
throws JsonMappingException
{
// First: check what creators we can use, if any
ValueInstantiator valueInstantiator = findValueInstantiator(ctxt, beanDesc);
BeanDeserializerBuilder builder = constructBeanDeserializerBuilder(ctxt, beanDesc);
builder.setValueInstantiator(valueInstantiator);
// And then setters for deserializing from JSON Object
addBeanProps(ctxt, beanDesc, builder);
addObjectIdReader(ctxt, beanDesc, builder);
// managed/back reference fields/setters need special handling... first part
addReferenceProperties(ctxt, beanDesc, builder);
addInjectables(ctxt, beanDesc, builder);
final DeserializationConfig config = ctxt.getConfig();
// [JACKSON-440]: update builder now that all information is in?
if (_factoryConfig.hasDeserializerModifiers()) {
for (BeanDeserializerModifier mod : _factoryConfig.deserializerModifiers()) {
builder = mod.updateBuilder(config, beanDesc, builder);
}
}
JsonDeserializer<?> deserializer;
/* 19-Mar-2012, tatu: This check used to be done earlier; but we have to defer
* it a bit to collect information on ObjectIdReader, for example.
*/
if (type.isAbstract() && !valueInstantiator.canInstantiate()) {
deserializer = builder.buildAbstract();
} else {
deserializer = builder.build();
}
// [JACKSON-440]: may have modifier(s) that want to modify or replace the serializer we just built:
if (_factoryConfig.hasDeserializerModifiers()) {
for (BeanDeserializerModifier mod : _factoryConfig.deserializerModifiers()) {
deserializer = mod.modifyDeserializer(config, beanDesc, deserializer);
}
}
return (JsonDeserializer<Object>) deserializer;
}
/**
* Method for constructing a bean deserializer that uses specified
* intermediate Builder for binding data, and construction of the
* value instance.
* Note that implementation is mostly copied from the regular
* BeanDeserializer build method.
*/
@SuppressWarnings("unchecked")
protected JsonDeserializer<Object> buildBuilderBasedDeserializer(
DeserializationContext ctxt, JavaType valueType, BeanDescription builderDesc)
throws JsonMappingException
{
// Creators, anyone? (to create builder itself)
ValueInstantiator valueInstantiator = findValueInstantiator(ctxt, builderDesc);
final DeserializationConfig config = ctxt.getConfig();
BeanDeserializerBuilder builder = constructBeanDeserializerBuilder(ctxt, builderDesc);
builder.setValueInstantiator(valueInstantiator);
// And then "with methods" for deserializing from JSON Object
addBeanProps(ctxt, builderDesc, builder);
addObjectIdReader(ctxt, builderDesc, builder);
// managed/back reference fields/setters need special handling... first part
addReferenceProperties(ctxt, builderDesc, builder);
addInjectables(ctxt, builderDesc, builder);
JsonPOJOBuilder.Value builderConfig = builderDesc.findPOJOBuilderConfig();
final String buildMethodName = (builderConfig == null) ?
"build" : builderConfig.buildMethodName;
// and lastly, find build method to use:
AnnotatedMethod buildMethod = builderDesc.findMethod(buildMethodName, null);
if (buildMethod != null) { // note: can't yet throw error; may be given build method
if (config.canOverrideAccessModifiers()) {
ClassUtil.checkAndFixAccess(buildMethod.getMember());
}
}
builder.setPOJOBuilder(buildMethod, builderConfig);
// this may give us more information...
if (_factoryConfig.hasDeserializerModifiers()) {
for (BeanDeserializerModifier mod : _factoryConfig.deserializerModifiers()) {
builder = mod.updateBuilder(config, builderDesc, builder);
}
}
JsonDeserializer<?> deserializer = builder.buildBuilderBased(
valueType, buildMethodName);
// [JACKSON-440]: may have modifier(s) that want to modify or replace the deserializer we just built:
if (_factoryConfig.hasDeserializerModifiers()) {
for (BeanDeserializerModifier mod : _factoryConfig.deserializerModifiers()) {
deserializer = mod.modifyDeserializer(config, builderDesc, deserializer);
}
}
return (JsonDeserializer<Object>) deserializer;
}
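// Illustrative sketch, not part of the original source: the kind of builder
// class buildBuilderBasedDeserializer() targets (names are hypothetical; the
// builder would be selected with @JsonDeserialize(builder = SketchValue.Builder.class)).
static class SketchValue {
    final int x;
    private SketchValue(int x) { this.x = x; }

    @JsonPOJOBuilder(withPrefix = "set") // build method name defaults to "build"
    static class Builder {
        private int x;
        Builder setX(int x) { this.x = x; return this; }
        SketchValue build() { return new SketchValue(x); }
    }
}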
protected void addObjectIdReader(DeserializationContext ctxt,
BeanDescription beanDesc, BeanDeserializerBuilder builder)
throws JsonMappingException
{
ObjectIdInfo objectIdInfo = beanDesc.getObjectIdInfo();
if (objectIdInfo == null) {
return;
}
Class<?> implClass = objectIdInfo.getGeneratorType();
JavaType idType;
SettableBeanProperty idProp;
ObjectIdGenerator<?> gen;
// Just one special case: Property-based generator is trickier
if (implClass == ObjectIdGenerators.PropertyGenerator.class) { // most special one, needs extra work
String propName = objectIdInfo.getPropertyName();
idProp = builder.findProperty(propName);
if (idProp == null) {
throw new IllegalArgumentException("Invalid Object Id definition for "
+beanDesc.getBeanClass().getName()+": can not find property with name '"+propName+"'");
}
idType = idProp.getType();
gen = new PropertyBasedObjectIdGenerator(objectIdInfo.getScope());
} else {
JavaType type = ctxt.constructType(implClass);
idType = ctxt.getTypeFactory().findTypeParameters(type, ObjectIdGenerator.class)[0];
idProp = null;
gen = ctxt.objectIdGeneratorInstance(beanDesc.getClassInfo(), objectIdInfo);
}
// also: unlike with value deserializers, let's just resolve one we need here
JsonDeserializer<?> deser = ctxt.findRootValueDeserializer(idType);
builder.setObjectIdReader(ObjectIdReader.construct(idType,
objectIdInfo.getPropertyName(), gen, deser, idProp));
}
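// Illustrative sketch, not part of the original source: the PropertyGenerator
// special case handled above; names are hypothetical.
@JsonIdentityInfo(generator = ObjectIdGenerators.PropertyGenerator.class, property = "id")
static class SketchNode {
    public int id;
    public SketchNode next; // a cycle is read back through the id value, not re-parsed
}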
@SuppressWarnings("unchecked")
public JsonDeserializer<Object> buildThrowableDeserializer(DeserializationContext ctxt,
JavaType type, BeanDescription beanDesc)
throws JsonMappingException
{
final DeserializationConfig config = ctxt.getConfig();
// first: construct like a regular bean deserializer...
BeanDeserializerBuilder builder = constructBeanDeserializerBuilder(ctxt, beanDesc);
builder.setValueInstantiator(findValueInstantiator(ctxt, beanDesc));
addBeanProps(ctxt, beanDesc, builder);
// (and assume there won't be any back references)
// But then let's decorate things a bit
/* To resolve [JACKSON-95], need to add "initCause" as setter
* for exceptions (sub-classes of Throwable).
*/
AnnotatedMethod am = beanDesc.findMethod("initCause", INIT_CAUSE_PARAMS);
if (am != null) { // should never be null
SimpleBeanPropertyDefinition propDef = new SimpleBeanPropertyDefinition(am, "cause");
SettableBeanProperty prop = constructSettableProperty(ctxt, beanDesc, propDef,
am.getGenericParameterType(0));
if (prop != null) {
/* 21-Aug-2011, tatus: We may actually have found 'cause' property
* to set (with new 1.9 code)... but let's replace it just in case,
* otherwise can end up with odd errors.
*/
builder.addOrReplaceProperty(prop, true);
}
}
// And also need to ignore "localizedMessage"
builder.addIgnorable("localizedMessage");
// [JACKSON-794]: JDK 7 also added "getSuppressed", skip if we have such data:
builder.addIgnorable("suppressed");
/* As well as "message": it will be passed via constructor,
* as there's no 'setMessage()' method
*/
builder.addIgnorable("message");
// [JACKSON-440]: update builder now that all information is in?
if (_factoryConfig.hasDeserializerModifiers()) {
for (BeanDeserializerModifier mod : _factoryConfig.deserializerModifiers()) {
builder = mod.updateBuilder(config, beanDesc, builder);
}
}
JsonDeserializer<?> deserializer = builder.build();
/* At this point it ought to be a BeanDeserializer; if not, must assume
* it's some other thing that can handle deserialization ok...
*/
if (deserializer instanceof BeanDeserializer) {
deserializer = new ThrowableDeserializer((BeanDeserializer) deserializer);
}
// [JACKSON-440]: may have modifier(s) that want to modify or replace the deserializer we just built:
if (_factoryConfig.hasDeserializerModifiers()) {
for (BeanDeserializerModifier mod : _factoryConfig.deserializerModifiers()) {
deserializer = mod.modifyDeserializer(config, beanDesc, deserializer);
}
}
return (JsonDeserializer<Object>) deserializer;
}
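// Illustrative sketch, not part of the original source: the effect of the
// decorations above. An exception typically round-trips because "message" is
// bound through the constructor and "cause" through initCause().
static IllegalStateException sketchRoundTrip(ObjectMapper mapper) throws IOException {
    String json = mapper.writeValueAsString(new IllegalStateException("boom"));
    return mapper.readValue(json, IllegalStateException.class);
}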
/*
/**********************************************************
/* Helper methods for Bean deserializer construction,
/* overridable by sub-classes
/**********************************************************
*/
/**
* Overridable method that constructs a {@link BeanDeserializerBuilder}
* which is used to accumulate information needed to create deserializer
* instance.
*/
protected BeanDeserializerBuilder constructBeanDeserializerBuilder(DeserializationContext ctxt,
BeanDescription beanDesc) {
return new BeanDeserializerBuilder(beanDesc, ctxt.getConfig());
}
/**
* Method called to figure out settable properties for the
* bean deserializer to use.
*<p>
* Note: designed to be overridable, and effort is made to keep interface
* similar between versions.
*/
protected void addBeanProps(DeserializationContext ctxt,
BeanDescription beanDesc, BeanDeserializerBuilder builder)
throws JsonMappingException
{
final SettableBeanProperty[] creatorProps =
builder.getValueInstantiator().getFromObjectArguments(ctxt.getConfig());
// Things specified as "ok to ignore"? [JACKSON-77]
AnnotationIntrospector intr = ctxt.getAnnotationIntrospector();
boolean ignoreAny = false;
{
Boolean B = intr.findIgnoreUnknownProperties(beanDesc.getClassInfo());
if (B != null) {
ignoreAny = B.booleanValue();
builder.setIgnoreUnknownProperties(ignoreAny);
}
}
// Or explicit/implicit definitions?
Set<String> ignored = ArrayBuilders.arrayToSet(intr.findPropertiesToIgnore(beanDesc.getClassInfo()));
for (String propName : ignored) {
builder.addIgnorable(propName);
}
// Also, do we have a fallback "any" setter?
AnnotatedMethod anySetter = beanDesc.findAnySetter();
if (anySetter != null) {
builder.setAnySetter(constructAnySetter(ctxt, beanDesc, anySetter));
}
// NOTE: we do NOT add @JsonIgnore'd properties into blocked ones if there's any setter
// Implicit ones via @JsonIgnore and equivalent?
if (anySetter == null) {
Collection<String> ignored2 = beanDesc.getIgnoredPropertyNames();
if (ignored2 != null) {
for (String propName : ignored2) {
// allow ignoral of similarly named JSON property, but do not force;
// latter means NOT adding this to 'ignored':
builder.addIgnorable(propName);
}
}
}
final boolean useGettersAsSetters = (ctxt.isEnabled(MapperFeature.USE_GETTERS_AS_SETTERS)
&& ctxt.isEnabled(MapperFeature.AUTO_DETECT_GETTERS));
// Ok: let's then filter out property definitions
List<BeanPropertyDefinition> propDefs = filterBeanProps(ctxt,
beanDesc, builder, beanDesc.findProperties(), ignored);
// After which we can let custom code change the set
if (_factoryConfig.hasDeserializerModifiers()) {
for (BeanDeserializerModifier mod : _factoryConfig.deserializerModifiers()) {
propDefs = mod.updateProperties(ctxt.getConfig(), beanDesc, propDefs);
}
}
// At which point we still have all kinds of properties; not all with mutators:
for (BeanPropertyDefinition propDef : propDefs) {
SettableBeanProperty prop = null;
if (propDef.hasConstructorParameter()) {
/* [JACKSON-700] If property is passed via constructor parameter, we must
* handle things in special way. Not sure what is the most optimal way...
* for now, let's just call a (new) method in builder, which does nothing.
*/
// but let's call a method just to allow custom builders to be aware...
final String name = propDef.getName();
- for (SettableBeanProperty cp : creatorProps) {
- if (name.equals(cp.getName())) {
- prop = cp;
- break;
+ if (creatorProps != null) {
+ for (SettableBeanProperty cp : creatorProps) {
+ if (name.equals(cp.getName())) {
+ prop = cp;
+ break;
+ }
}
}
if (prop == null) {
throw ctxt.mappingException("Could not find creator property with name '"+name+"'");
}
builder.addCreatorProperty(prop);
continue;
}
if (propDef.hasSetter()) {
Type propertyType = propDef.getSetter().getGenericParameterType(0);
prop = constructSettableProperty(ctxt, beanDesc, propDef, propertyType);
} else if (propDef.hasField()) {
Type propertyType = propDef.getField().getGenericType();
prop = constructSettableProperty(ctxt, beanDesc, propDef, propertyType);
} else if (useGettersAsSetters && propDef.hasGetter()) {
/* As per [JACKSON-88], may also need to consider getters
* for Map/Collection properties; but with lowest precedence
*/
AnnotatedMethod getter = propDef.getGetter();
// should only consider Collections and Maps, for now?
Class<?> rawPropertyType = getter.getRawType();
if (Collection.class.isAssignableFrom(rawPropertyType)
|| Map.class.isAssignableFrom(rawPropertyType)) {
prop = constructSetterlessProperty(ctxt, beanDesc, propDef);
}
}
if (prop != null) {
Class<?>[] views = propDef.findViews();
if (views == null) {
// one more twist: if default inclusion disabled, need to force empty set of views
if (!ctxt.isEnabled(MapperFeature.DEFAULT_VIEW_INCLUSION)) {
views = NO_VIEWS;
}
}
// one more thing before adding to builder: copy any metadata
prop.setViews(views);
builder.addProperty(prop);
}
}
}
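// Illustrative sketch, not part of the original source: the two ignoral
// sources collected above; names are hypothetical.
@JsonIgnoreProperties(value = { "internal" }, ignoreUnknown = true)
static class SketchAccount {
    public String name;
    @JsonIgnore
    public String secret; // surfaces via beanDesc.getIgnoredPropertyNames()
}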
/**
 * Helper method called to filter out explicitly ignored properties,
* as well as properties that have "ignorable types".
* Note that this will not remove properties that have no
* setters.
*/
protected List<BeanPropertyDefinition> filterBeanProps(DeserializationContext ctxt,
BeanDescription beanDesc, BeanDeserializerBuilder builder,
List<BeanPropertyDefinition> propDefsIn,
Set<String> ignored)
throws JsonMappingException
{
ArrayList<BeanPropertyDefinition> result = new ArrayList<BeanPropertyDefinition>(
Math.max(4, propDefsIn.size()));
HashMap<Class<?>,Boolean> ignoredTypes = new HashMap<Class<?>,Boolean>();
// These are all valid setters, but we do need to introspect a bit more
for (BeanPropertyDefinition property : propDefsIn) {
String name = property.getName();
if (ignored.contains(name)) { // explicit ignoral using @JsonIgnoreProperties needs to block entries
continue;
}
if (!property.hasConstructorParameter()) { // never skip constructor params
Class<?> rawPropertyType = null;
if (property.hasSetter()) {
rawPropertyType = property.getSetter().getRawParameterType(0);
} else if (property.hasField()) {
rawPropertyType = property.getField().getRawType();
}
// [JACKSON-429] Some types are declared as ignorable as well
if ((rawPropertyType != null)
&& (isIgnorableType(ctxt.getConfig(), beanDesc, rawPropertyType, ignoredTypes))) {
// important: make ignorable, to avoid errors if value is actually seen
builder.addIgnorable(name);
continue;
}
}
result.add(property);
}
return result;
}
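// Illustrative sketch, not part of the original source: a type-level ignoral
// that isIgnorableType() (further below) reports, making filterBeanProps()
// mark any property of this type as ignorable.
@JsonIgnoreType
static class SketchAuditInfo {
    public String lastModifiedBy; // never bound when used as a property type
}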
/**
 * Method that will find if the bean has any managed- or back-reference properties,
 * and if so add them to the bean, to be linked during the resolution phase.
*/
protected void addReferenceProperties(DeserializationContext ctxt,
BeanDescription beanDesc, BeanDeserializerBuilder builder)
throws JsonMappingException
{
// and then back references, not necessarily found as regular properties
Map<String,AnnotatedMember> refs = beanDesc.findBackReferenceProperties();
if (refs != null) {
for (Map.Entry<String, AnnotatedMember> en : refs.entrySet()) {
String name = en.getKey();
AnnotatedMember m = en.getValue();
Type genericType;
if (m instanceof AnnotatedMethod) {
genericType = ((AnnotatedMethod) m).getGenericParameterType(0);
} else {
genericType = m.getRawType();
}
SimpleBeanPropertyDefinition propDef = new SimpleBeanPropertyDefinition(m);
builder.addBackReferenceProperty(name, constructSettableProperty(
ctxt, beanDesc, propDef, genericType));
}
}
}
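// Illustrative sketch, not part of the original source: the reference pair
// that addReferenceProperties() links back up; names are hypothetical.
static class SketchParent {
    @JsonManagedReference
    public List<SketchChild> children;
}
static class SketchChild {
    @JsonBackReference
    public SketchParent parent; // restored during resolution, not read from JSON
}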
/**
 * Method called to locate all members used for value injection (if any),
 * construct {@link com.fasterxml.jackson.databind.deser.impl.ValueInjector} instances, and add them to the builder.
*/
protected void addInjectables(DeserializationContext ctxt,
BeanDescription beanDesc, BeanDeserializerBuilder builder)
throws JsonMappingException
{
Map<Object, AnnotatedMember> raw = beanDesc.findInjectables();
if (raw != null) {
boolean fixAccess = ctxt.canOverrideAccessModifiers();
for (Map.Entry<Object, AnnotatedMember> entry : raw.entrySet()) {
AnnotatedMember m = entry.getValue();
if (fixAccess) {
m.fixAccess(); // to ensure we can call it
}
builder.addInjectable(m.getName(), beanDesc.resolveType(m.getGenericType()),
beanDesc.getClassAnnotations(), m, entry.getKey());
}
}
}
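// Illustrative sketch, not part of the original source: a member picked up by
// addInjectables(); the injection key "appName" is hypothetical.
static class SketchConfig {
    @JacksonInject("appName")
    public String appName;
}
// Usage sketch:
//   mapper.setInjectableValues(new InjectableValues.Std().addValue("appName", "demo"));
//   SketchConfig c = mapper.readValue("{}", SketchConfig.class);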
/**
* Method called to construct fallback {@link SettableAnyProperty}
* for handling unknown bean properties, given a method that
 * has been designated as such a setter.
*/
protected SettableAnyProperty constructAnySetter(DeserializationContext ctxt,
BeanDescription beanDesc, AnnotatedMethod setter)
throws JsonMappingException
{
if (ctxt.canOverrideAccessModifiers()) {
setter.fixAccess(); // to ensure we can call it
}
// we know it's a 2-arg method, second arg is the value
JavaType type = beanDesc.bindingsForBeanType().resolveType(setter.getGenericParameterType(1));
BeanProperty.Std property = new BeanProperty.Std(setter.getName(), type, beanDesc.getClassAnnotations(), setter);
type = resolveType(ctxt, beanDesc, type, setter);
/* AnySetter can be annotated with @JsonClass (etc) just like a
* regular setter... so let's see if those are used.
* Returns null if no annotations, in which case binding will
* be done at a later point.
*/
JsonDeserializer<Object> deser = findDeserializerFromAnnotation(ctxt, setter);
if (deser != null) {
return new SettableAnyProperty(property, setter, type, deser);
}
/* Otherwise, method may specify more specific (sub-)class for
* value (no need to check if explicit deser was specified):
*/
type = modifyTypeByAnnotation(ctxt, setter, type);
return new SettableAnyProperty(property, setter, type, null);
}
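// Illustrative sketch, not part of the original source: the two-argument
// method shape that constructAnySetter() wraps.
static class SketchExtensible {
    final Map<String, Object> extra = new HashMap<String, Object>();
    @JsonAnySetter
    void set(String name, Object value) { extra.put(name, value); }
}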
/**
* Method that will construct a regular bean property setter using
* the given setter method.
*
* @return Property constructed, if any; or null to indicate that
* there should be no property based on given definitions.
*/
protected SettableBeanProperty constructSettableProperty(DeserializationContext ctxt,
BeanDescription beanDesc, BeanPropertyDefinition propDef,
Type jdkType)
throws JsonMappingException
{
// need to ensure method is callable (for non-public)
AnnotatedMember mutator = propDef.getMutator();
if (ctxt.canOverrideAccessModifiers()) {
mutator.fixAccess();
}
// note: this works since we know there's exactly one argument for methods
JavaType t0 = beanDesc.resolveType(jdkType);
BeanProperty.Std property = new BeanProperty.Std(propDef.getName(), t0, beanDesc.getClassAnnotations(), mutator);
JavaType type = resolveType(ctxt, beanDesc, t0, mutator);
// did type change?
if (type != t0) {
property = property.withType(type);
}
/* First: does the Method specify the deserializer to use?
* If so, let's use it.
*/
JsonDeserializer<Object> propDeser = findDeserializerFromAnnotation(ctxt, mutator);
type = modifyTypeByAnnotation(ctxt, mutator, type);
TypeDeserializer typeDeser = type.getTypeHandler();
SettableBeanProperty prop;
if (mutator instanceof AnnotatedMethod) {
prop = new MethodProperty(propDef, type, typeDeser,
beanDesc.getClassAnnotations(), (AnnotatedMethod) mutator);
} else {
prop = new FieldProperty(propDef, type, typeDeser,
beanDesc.getClassAnnotations(), (AnnotatedField) mutator);
}
if (propDeser != null) {
prop = prop.withValueDeserializer(propDeser);
}
// [JACKSON-235]: need to retain name of managed forward references:
AnnotationIntrospector.ReferenceProperty ref = propDef.findReferenceType();
if (ref != null && ref.isManagedReference()) {
prop.setManagedReferenceName(ref.getName());
}
return prop;
}
/**
 * Method that will construct a setterless bean property, using
 * the getter method of the given property definition.
*/
protected SettableBeanProperty constructSetterlessProperty(DeserializationContext ctxt,
BeanDescription beanDesc, BeanPropertyDefinition propDef)
throws JsonMappingException
{
final AnnotatedMethod getter = propDef.getGetter();
// need to ensure it is callable now:
if (ctxt.canOverrideAccessModifiers()) {
getter.fixAccess();
}
/* 26-Jan-2012, tatu: Alas, this complication is still needed to handle
* (or at least work around) local type declarations...
*/
JavaType type = getter.getType(beanDesc.bindingsForBeanType());
/* First: does the Method specify the deserializer to use?
* If so, let's use it.
*/
JsonDeserializer<Object> propDeser = findDeserializerFromAnnotation(ctxt, getter);
type = modifyTypeByAnnotation(ctxt, getter, type);
TypeDeserializer typeDeser = type.getTypeHandler();
SettableBeanProperty prop = new SetterlessProperty(propDef, type, typeDeser,
beanDesc.getClassAnnotations(), getter);
if (propDeser != null) {
prop = prop.withValueDeserializer(propDeser);
}
return prop;
}
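// Illustrative sketch, not part of the original source: a setterless
// Collection property served by constructSetterlessProperty() when
// MapperFeature.USE_GETTERS_AS_SETTERS is enabled; contents are added to the
// collection returned by the getter, in place.
static class SketchOrder {
    private final List<String> items = new ArrayList<String>();
    public List<String> getItems() { return items; }
}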
/*
/**********************************************************
/* Helper methods for Bean deserializer, other
/**********************************************************
*/
/**
 * Helper method used to skip processing for types that we know
 * can not be (i.e. are never considered to be) beans:
 * things like primitives, arrays, Enums, and proxy types.
 *<p>
 * Note that usually we shouldn't really be getting these sorts of
 * types anyway; but better safe than sorry.
*/
protected boolean isPotentialBeanType(Class<?> type)
{
String typeStr = ClassUtil.canBeABeanType(type);
if (typeStr != null) {
throw new IllegalArgumentException("Can not deserialize Class "+type.getName()+" (of type "+typeStr+") as a Bean");
}
if (ClassUtil.isProxyType(type)) {
throw new IllegalArgumentException("Can not deserialize Proxy class "+type.getName()+" as a Bean");
}
/* also: can't deserialize some local classes: static are ok; in-method not;
* and with [JACKSON-594], other non-static inner classes are ok
*/
typeStr = ClassUtil.isLocalType(type, true);
if (typeStr != null) {
throw new IllegalArgumentException("Can not deserialize Class "+type.getName()+" (of type "+typeStr+") as a Bean");
}
return true;
}
/**
* Helper method that will check whether given raw type is marked as always ignorable
 * (for the purpose of ignoring properties of that type)
*/
protected boolean isIgnorableType(DeserializationConfig config, BeanDescription beanDesc,
Class<?> type, Map<Class<?>,Boolean> ignoredTypes)
{
Boolean status = ignoredTypes.get(type);
if (status == null) {
BeanDescription desc = config.introspectClassAnnotations(type);
status = config.getAnnotationIntrospector().isIgnorableType(desc.getClassInfo());
// We default to 'false', i.e. not ignorable
if (status == null) {
status = Boolean.FALSE;
}
}
return status;
}
}
| true | true | protected void addBeanProps(DeserializationContext ctxt,
BeanDescription beanDesc, BeanDeserializerBuilder builder)
throws JsonMappingException
{
final SettableBeanProperty[] creatorProps =
builder.getValueInstantiator().getFromObjectArguments(ctxt.getConfig());
// Things specified as "ok to ignore"? [JACKSON-77]
AnnotationIntrospector intr = ctxt.getAnnotationIntrospector();
boolean ignoreAny = false;
{
Boolean B = intr.findIgnoreUnknownProperties(beanDesc.getClassInfo());
if (B != null) {
ignoreAny = B.booleanValue();
builder.setIgnoreUnknownProperties(ignoreAny);
}
}
// Or explicit/implicit definitions?
Set<String> ignored = ArrayBuilders.arrayToSet(intr.findPropertiesToIgnore(beanDesc.getClassInfo()));
for (String propName : ignored) {
builder.addIgnorable(propName);
}
// Also, do we have a fallback "any" setter?
AnnotatedMethod anySetter = beanDesc.findAnySetter();
if (anySetter != null) {
builder.setAnySetter(constructAnySetter(ctxt, beanDesc, anySetter));
}
// NOTE: we do NOT add @JsonIgnore'd properties into blocked ones if there's any setter
// Implicit ones via @JsonIgnore and equivalent?
if (anySetter == null) {
Collection<String> ignored2 = beanDesc.getIgnoredPropertyNames();
if (ignored2 != null) {
for (String propName : ignored2) {
// allow ignoral of similarly named JSON property, but do not force;
// latter means NOT adding this to 'ignored':
builder.addIgnorable(propName);
}
}
}
final boolean useGettersAsSetters = (ctxt.isEnabled(MapperFeature.USE_GETTERS_AS_SETTERS)
&& ctxt.isEnabled(MapperFeature.AUTO_DETECT_GETTERS));
// Ok: let's then filter out property definitions
List<BeanPropertyDefinition> propDefs = filterBeanProps(ctxt,
beanDesc, builder, beanDesc.findProperties(), ignored);
// After which we can let custom code change the set
if (_factoryConfig.hasDeserializerModifiers()) {
for (BeanDeserializerModifier mod : _factoryConfig.deserializerModifiers()) {
propDefs = mod.updateProperties(ctxt.getConfig(), beanDesc, propDefs);
}
}
// At which point we still have all kinds of properties; not all with mutators:
for (BeanPropertyDefinition propDef : propDefs) {
SettableBeanProperty prop = null;
if (propDef.hasConstructorParameter()) {
/* [JACKSON-700] If property is passed via constructor parameter, we must
* handle things in special way. Not sure what is the most optimal way...
* for now, let's just call a (new) method in builder, which does nothing.
*/
// but let's call a method just to allow custom builders to be aware...
final String name = propDef.getName();
for (SettableBeanProperty cp : creatorProps) {
if (name.equals(cp.getName())) {
prop = cp;
break;
}
}
if (prop == null) {
throw ctxt.mappingException("Could not find creator property with name '"+name+"'");
}
builder.addCreatorProperty(prop);
continue;
}
if (propDef.hasSetter()) {
Type propertyType = propDef.getSetter().getGenericParameterType(0);
prop = constructSettableProperty(ctxt, beanDesc, propDef, propertyType);
} else if (propDef.hasField()) {
Type propertyType = propDef.getField().getGenericType();
prop = constructSettableProperty(ctxt, beanDesc, propDef, propertyType);
} else if (useGettersAsSetters && propDef.hasGetter()) {
/* As per [JACKSON-88], may also need to consider getters
* for Map/Collection properties; but with lowest precedence
*/
AnnotatedMethod getter = propDef.getGetter();
// should only consider Collections and Maps, for now?
Class<?> rawPropertyType = getter.getRawType();
if (Collection.class.isAssignableFrom(rawPropertyType)
|| Map.class.isAssignableFrom(rawPropertyType)) {
prop = constructSetterlessProperty(ctxt, beanDesc, propDef);
}
}
if (prop != null) {
Class<?>[] views = propDef.findViews();
if (views == null) {
// one more twist: if default inclusion disabled, need to force empty set of views
if (!ctxt.isEnabled(MapperFeature.DEFAULT_VIEW_INCLUSION)) {
views = NO_VIEWS;
}
}
// one more thing before adding to builder: copy any metadata
prop.setViews(views);
builder.addProperty(prop);
}
}
}
| protected void addBeanProps(DeserializationContext ctxt,
BeanDescription beanDesc, BeanDeserializerBuilder builder)
throws JsonMappingException
{
final SettableBeanProperty[] creatorProps =
builder.getValueInstantiator().getFromObjectArguments(ctxt.getConfig());
// Things specified as "ok to ignore"? [JACKSON-77]
AnnotationIntrospector intr = ctxt.getAnnotationIntrospector();
boolean ignoreAny = false;
{
Boolean B = intr.findIgnoreUnknownProperties(beanDesc.getClassInfo());
if (B != null) {
ignoreAny = B.booleanValue();
builder.setIgnoreUnknownProperties(ignoreAny);
}
}
// Or explicit/implicit definitions?
Set<String> ignored = ArrayBuilders.arrayToSet(intr.findPropertiesToIgnore(beanDesc.getClassInfo()));
for (String propName : ignored) {
builder.addIgnorable(propName);
}
// Also, do we have a fallback "any" setter?
AnnotatedMethod anySetter = beanDesc.findAnySetter();
if (anySetter != null) {
builder.setAnySetter(constructAnySetter(ctxt, beanDesc, anySetter));
}
// NOTE: we do NOT add @JsonIgnore'd properties into blocked ones if there's any setter
// Implicit ones via @JsonIgnore and equivalent?
if (anySetter == null) {
Collection<String> ignored2 = beanDesc.getIgnoredPropertyNames();
if (ignored2 != null) {
for (String propName : ignored2) {
// allow ignoral of similarly named JSON property, but do not force;
// latter means NOT adding this to 'ignored':
builder.addIgnorable(propName);
}
}
}
final boolean useGettersAsSetters = (ctxt.isEnabled(MapperFeature.USE_GETTERS_AS_SETTERS)
&& ctxt.isEnabled(MapperFeature.AUTO_DETECT_GETTERS));
// Ok: let's then filter out property definitions
List<BeanPropertyDefinition> propDefs = filterBeanProps(ctxt,
beanDesc, builder, beanDesc.findProperties(), ignored);
// After which we can let custom code change the set
if (_factoryConfig.hasDeserializerModifiers()) {
for (BeanDeserializerModifier mod : _factoryConfig.deserializerModifiers()) {
propDefs = mod.updateProperties(ctxt.getConfig(), beanDesc, propDefs);
}
}
// At which point we still have all kinds of properties; not all with mutators:
for (BeanPropertyDefinition propDef : propDefs) {
SettableBeanProperty prop = null;
if (propDef.hasConstructorParameter()) {
/* [JACKSON-700] If property is passed via constructor parameter, we must
* handle things in special way. Not sure what is the most optimal way...
* for now, let's just call a (new) method in builder, which does nothing.
*/
// but let's call a method just to allow custom builders to be aware...
final String name = propDef.getName();
if (creatorProps != null) {
for (SettableBeanProperty cp : creatorProps) {
if (name.equals(cp.getName())) {
prop = cp;
break;
}
}
}
if (prop == null) {
throw ctxt.mappingException("Could not find creator property with name '"+name+"'");
}
builder.addCreatorProperty(prop);
continue;
}
if (propDef.hasSetter()) {
Type propertyType = propDef.getSetter().getGenericParameterType(0);
prop = constructSettableProperty(ctxt, beanDesc, propDef, propertyType);
} else if (propDef.hasField()) {
Type propertyType = propDef.getField().getGenericType();
prop = constructSettableProperty(ctxt, beanDesc, propDef, propertyType);
} else if (useGettersAsSetters && propDef.hasGetter()) {
/* As per [JACKSON-88], may also need to consider getters
* for Map/Collection properties; but with lowest precedence
*/
AnnotatedMethod getter = propDef.getGetter();
// should only consider Collections and Maps, for now?
Class<?> rawPropertyType = getter.getRawType();
if (Collection.class.isAssignableFrom(rawPropertyType)
|| Map.class.isAssignableFrom(rawPropertyType)) {
prop = constructSetterlessProperty(ctxt, beanDesc, propDef);
}
}
if (prop != null) {
Class<?>[] views = propDef.findViews();
if (views == null) {
// one more twist: if default inclusion disabled, need to force empty set of views
if (!ctxt.isEnabled(MapperFeature.DEFAULT_VIEW_INCLUSION)) {
views = NO_VIEWS;
}
}
// one more thing before adding to builder: copy any metadata
prop.setViews(views);
builder.addProperty(prop);
}
}
}
|
diff --git a/webapp/src/test/java/test/unit/org/testinfected/petstore/dispatch/RoutingTableTest.java b/webapp/src/test/java/test/unit/org/testinfected/petstore/dispatch/RoutingTableTest.java
index 3d3d17f5..4fd0247e 100644
--- a/webapp/src/test/java/test/unit/org/testinfected/petstore/dispatch/RoutingTableTest.java
+++ b/webapp/src/test/java/test/unit/org/testinfected/petstore/dispatch/RoutingTableTest.java
@@ -1,50 +1,50 @@
package test.unit.org.testinfected.petstore.dispatch;
import org.jmock.Expectations;
import org.jmock.Mockery;
import org.jmock.integration.junit4.JMock;
import org.jmock.integration.junit4.JUnit4Mockery;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.simpleframework.http.Request;
import org.testinfected.petstore.dispatch.Route;
import org.testinfected.petstore.dispatch.Router;
import static org.hamcrest.CoreMatchers.sameInstance;
import static org.hamcrest.MatcherAssert.assertThat;
@RunWith(JMock.class)
public class RoutingTableTest {
Mockery context = new JUnit4Mockery();
Router.RoutingTable routingTable = new Router.RoutingTable();
Request request = context.mock(Request.class);
Route inappropriateRoute = context.mock(Route.class, "inappropriate route");
Route preferredRoute = context.mock(Route.class, "preferred route");
Route alternateRoute = context.mock(Route.class, "alternate route");
Route defaultRoute = context.mock(Route.class, "default route");
@Test public void
routesToDefaultWhenNoAppropriateRouteExists() throws Exception {
context.checking(new Expectations() {{
allowing(inappropriateRoute).matches(with(same(request))); will(returnValue(false));
}});
routingTable.add(inappropriateRoute);
routingTable.setDefaultRoute(defaultRoute);
assertThat("route", routingTable.locateRoute(request), sameInstance(defaultRoute));
}
@Test public void
- dispatchesToFirstRouteAppropriateRoute() throws Exception {
+ dispatchesToFirstAppropriateRoute() throws Exception {
context.checking(new Expectations() {{
allowing(preferredRoute).matches(with(same(request))); will(returnValue(true));
allowing(alternateRoute).matches(with(same(request))); will(returnValue(true));
}});
routingTable.add(preferredRoute);
routingTable.add(alternateRoute);
assertThat("route", routingTable.locateRoute(request), sameInstance(preferredRoute));
}
}
| true | true | @Test public void
dispatchesToFirstRouteAppropriateRoute() throws Exception {
context.checking(new Expectations() {{
allowing(preferredRoute).matches(with(same(request))); will(returnValue(true));
allowing(alternateRoute).matches(with(same(request))); will(returnValue(true));
}});
routingTable.add(preferredRoute);
routingTable.add(alternateRoute);
assertThat("route", routingTable.locateRoute(request), sameInstance(preferredRoute));
}
| @Test public void
dispatchesToFirstAppropriateRoute() throws Exception {
context.checking(new Expectations() {{
allowing(preferredRoute).matches(with(same(request))); will(returnValue(true));
allowing(alternateRoute).matches(with(same(request))); will(returnValue(true));
}});
routingTable.add(preferredRoute);
routingTable.add(alternateRoute);
assertThat("route", routingTable.locateRoute(request), sameInstance(preferredRoute));
}
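// Illustrative sketch, not part of the original source: a minimal first-match
// routing table consistent with the contract the tests above exercise
// (assumes java.util imports; the real Router.RoutingTable may differ).
public static class RoutingTableSketch {
    private final List<Route> routes = new ArrayList<Route>();
    private Route defaultRoute;

    public void add(Route route) { routes.add(route); }
    public void setDefaultRoute(Route route) { defaultRoute = route; }

    public Route locateRoute(Request request) {
        for (Route route : routes) {
            if (route.matches(request)) return route; // first match wins
        }
        return defaultRoute;
    }
}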
|
diff --git a/src/main/java/org/basex/tests/w3c/W3CTS.java b/src/main/java/org/basex/tests/w3c/W3CTS.java
index 5680cb4e8..3ab2254c8 100644
--- a/src/main/java/org/basex/tests/w3c/W3CTS.java
+++ b/src/main/java/org/basex/tests/w3c/W3CTS.java
@@ -1,804 +1,804 @@
package org.basex.tests.w3c;
import static org.basex.core.Text.*;
import static org.basex.util.Token.*;
import java.io.*;
import java.util.*;
import java.util.regex.*;
import org.basex.core.*;
import org.basex.core.Context;
import org.basex.core.cmd.*;
import org.basex.data.*;
import org.basex.io.*;
import org.basex.io.in.*;
import org.basex.io.out.*;
import org.basex.io.serial.*;
import org.basex.query.*;
import org.basex.query.expr.*;
import org.basex.query.func.*;
import org.basex.query.item.*;
import org.basex.query.iter.*;
import org.basex.query.util.*;
import org.basex.util.*;
import org.basex.util.list.*;
/**
* XQuery Test Suite wrapper.
*
* @author BaseX Team 2005-12, BSD License
* @author Christian Gruen
*/
public abstract class W3CTS {
// Try "ulimit -n 65536" if Linux tells you "Too many open files."
/** Inspect flag. */
private static final byte[] INSPECT = token("Inspect");
/** Fragment flag. */
private static final byte[] FRAGMENT = token("Fragment");
/** XML flag. */
private static final byte[] XML = token("XML");
/** Ignore flag. */
private static final byte[] IGNORE = token("Ignore");
/** Replacement pattern. */
private static final Pattern SLASH = Pattern.compile("/", Pattern.LITERAL);
/** Database context. */
protected final Context context = new Context();
/** Path to the XQuery Test Suite. */
protected String path = "";
/** Data reference. */
protected Data data;
/** Log file. */
private final String pathlog;
/** Test suite input. */
private final String input;
/** Test suite id. */
private final String testid;
/** Query path. */
private String queries;
/** Expected results. */
private String expected;
/** Reported results. */
private String results;
/** Maximum length of result output. */
private int maxout = 500;
/** Query filter string. */
private String single;
/** Flag for printing current time functions into log file. */
private boolean currTime;
/** Flag for creating report files. */
private boolean reporting;
/** Verbose flag. */
private boolean verbose;
/** Minimum time in ms to include query in performance statistics. */
private int timer = Integer.MAX_VALUE;
/** Minimum conformance. */
private boolean minimum;
/** Print compilation steps. */
private boolean compile;
/** test-group to use. */
private String group;
/** Cached source files. */
private final HashMap<String, String> srcs = new HashMap<String, String>();
/** Cached module files. */
private final HashMap<String, String> mods = new HashMap<String, String>();
/** Cached collections. */
private final HashMap<String, String[]> colls = new HashMap<String, String[]>();
/** OK log. */
private final StringBuilder logOK = new StringBuilder();
/** OK log for correct-but-empty results. */
private final StringBuilder logOK2 = new StringBuilder();
/** Error log. */
private final StringBuilder logErr = new StringBuilder();
/** Error log for queries that raised errors. */
private final StringBuilder logErr2 = new StringBuilder();
/** File log. */
private final StringBuilder logReport = new StringBuilder();
/** Error counter. */
private int err;
/** Error2 counter. */
private int err2;
/** OK counter. */
private int ok;
/** OK2 counter. */
private int ok2;
/**
* Constructor.
* @param nm name of test
*/
protected W3CTS(final String nm) {
input = nm + "Catalog" + IO.XMLSUFFIX;
testid = nm.substring(0, 4);
pathlog = testid.toLowerCase(Locale.ENGLISH) + ".log";
}
/**
* Runs the test suite.
* @param args command-line arguments
* @throws QueryException query exception
* @throws IOException I/O exception
*/
void run(final String[] args) throws QueryException, IOException {
try {
parseArguments(args);
} catch(final IOException ex) {
Util.errln(ex);
System.exit(1);
}
queries = path + "Queries/XQuery/";
expected = path + "ExpectedTestResults/";
results = path + "ReportingResults/Results/";
/* Reports. */
final String report = path + "ReportingResults/";
/* Test sources. */
final String sources = path + "TestSources/";
final Performance perf = new Performance();
context.prop.set(Prop.CHOP, false);
//new Check(path + input).execute(context);
data = CreateDB.mainMem(new IOFile(path + input), context);
final Nodes root = new Nodes(0, data);
Util.outln(NL + Util.name(this) + " Test Suite " +
text("/*:test-suite/@version", root));
Util.outln("Caching Sources...");
for(final int s : nodes("//*:source", root).list) {
final Nodes srcRoot = new Nodes(s, data);
final String val = (path + text("@FileName", srcRoot)).replace('\\', '/');
srcs.put(text("@ID", srcRoot), val);
}
Util.outln("Caching Modules...");
for(final int s : nodes("//*:module", root).list) {
final Nodes srcRoot = new Nodes(s, data);
final String val = (path + text("@FileName", srcRoot)).replace('\\', '/');
mods.put(text("@ID", srcRoot), val);
}
Util.outln("Caching Collections...");
for(final int c : nodes("//*:collection", root).list) {
final Nodes nodes = new Nodes(c, data);
final String cname = text("@ID", nodes);
final StringList dl = new StringList();
final Nodes doc = nodes("*:input-document", nodes);
for(int d = 0; d < doc.size(); ++d) {
dl.add(sources + string(data.atom(doc.list[d])) + IO.XMLSUFFIX);
}
colls.put(cname, dl.toArray());
}
init(root);
if(reporting) {
Util.outln("Delete old results...");
new IOFile(results).delete();
}
if(verbose) Util.outln();
final Nodes nodes = minimum ?
nodes("//*:test-group[starts-with(@name, 'Minim')]//*:test-case", root) :
group != null ? nodes("//*:test-group[@name eq '" + group +
"']//*:test-case", root) : nodes("//*:test-case", root);
long total = nodes.size();
Util.out("Parsing " + total + " Queries");
for(int t = 0; t < total; ++t) {
if(!parse(new Nodes(nodes.list[t], data))) break;
if(!verbose && t % 500 == 0) Util.out(".");
}
Util.outln();
total = ok + ok2 + err + err2;
final String time = perf.getTime();
Util.outln("Writing log file..." + NL);
PrintOutput po = new PrintOutput(path + pathlog);
po.println("TEST RESULTS ________________________________________________");
po.println(NL + "Total #Queries: " + total);
po.println("Correct / Empty Results: " + ok + " / " + ok2);
po.print("Conformance (w/Empty Results): ");
po.println(pc(ok, total) + " / " + pc(ok + ok2, total));
po.println("Wrong Results / Errors: " + err + " / " + err2 + NL);
po.println("WRONG _______________________________________________________");
po.print(NL + logErr);
po.println("WRONG (ERRORS) ______________________________________________");
po.print(NL + logErr2);
po.println("CORRECT? (EMPTY) ____________________________________________");
po.print(NL + logOK2);
po.println("CORRECT _____________________________________________________");
po.print(NL + logOK);
po.println("_____________________________________________________________");
po.close();
if(reporting) {
po = new PrintOutput(report + Prop.NAME + IO.XMLSUFFIX);
print(po, report + Prop.NAME + "Pre" + IO.XMLSUFFIX);
po.print(logReport.toString());
print(po, report + Prop.NAME + "Pos" + IO.XMLSUFFIX);
po.close();
}
Util.outln("Total #Queries: " + total);
Util.outln("Correct / Empty results: " + ok + " / " + ok2);
Util.out("Conformance (w/empty results): ");
Util.outln(pc(ok, total) + " / " + pc(ok + ok2, total));
Util.outln("Total Time: " + time);
context.close();
}
/**
* Calculates the percentage of correct queries.
* @param v value
* @param t total value
* @return percentage
*/
private static String pc(final int v, final long t) {
return (t == 0 ? 100 : v * 10000 / t / 100d) + "%";
}
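// Illustrative note, not part of the original source: the long division above
// truncates to two decimals before the double division, e.g.
//   pc(2, 3) -> 2 * 10000 / 3 = 6666; 6666 / 100d = 66.66 -> "66.66%"
//   pc(0, 0) -> "100%" (an empty test set counts as fully conformant)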
/**
* Parses the specified test case.
* @param root root node
* @throws QueryException query exception
* @throws IOException I/O exception
* @return true if the query, specified by {@link #single}, was evaluated
*/
private boolean parse(final Nodes root) throws QueryException, IOException {
final String pth = text("@FilePath", root);
final String outname = text("@name", root);
if(single != null && !outname.startsWith(single)) return true;
final Performance perf = new Performance();
if(verbose) Util.out("- " + outname);
boolean inspect = false;
boolean correct = true;
final Nodes nodes = states(root);
for(int n = 0; n < nodes.size(); ++n) {
final Nodes state = new Nodes(nodes.list[n], nodes.data);
final String inname = text("*:query/@name", state);
final IOFile query = new IOFile(queries + pth + inname + IO.XQSUFFIX);
context.prop.set(Prop.QUERYPATH, query.path());
final String in = read(query);
String er = null;
ValueBuilder iter = null;
final Nodes cont = nodes("*:contextItem", state);
Nodes curr = null;
if(cont.size() != 0) {
- final Data d = Check.check(context,
- srcs.get(string(data.atom(cont.list[0]))));
+ final String p = srcs.get(string(data.atom(cont.list[0])));
+ final Data d = Check.check(context, p, null);
curr = new Nodes(d.resources.docs().toArray(), d);
curr.root = true;
}
context.prop.set(Prop.QUERYINFO, compile);
final QueryProcessor xq = new QueryProcessor(in, context).context(curr);
context.prop.set(Prop.QUERYINFO, false);
final ArrayOutput ao = new ArrayOutput();
final TokenBuilder files = new TokenBuilder();
try {
files.add(file(nodes("*:input-file", state),
nodes("*:input-file/@variable", state), xq, n == 0));
files.add(file(nodes("*:defaultCollection", state), null, xq, n == 0));
var(nodes("*:input-URI", state), nodes("*:input-URI/@variable", state), xq);
eval(nodes("*:input-query/@name", state),
nodes("*:input-query/@variable", state), pth, xq);
parse(xq, state);
for(final int p : nodes("*:module", root).list) {
final String uri = text("@namespace", new Nodes(p, data));
final String file = IO.get(mods.get(string(data.atom(p))) + IO.XQSUFFIX).path();
xq.module(uri, file);
}
// evaluate query
iter = xq.value().cache();
// serialize query
final SerializerProp sp = new SerializerProp();
sp.set(SerializerProp.S_INDENT, NO);
final Serializer ser = Serializer.get(ao, sp);
for(Item it; (it = iter.next()) != null;) it.serialize(ser);
ser.close();
} catch(final Exception ex) {
if(!(ex instanceof QueryException || ex instanceof IOException)) {
Util.errln("\n*** " + outname + " ***");
Util.errln(in + '\n');
Util.stack(ex);
}
er = ex.getMessage();
if(er.startsWith(STOPPED_AT)) er = er.substring(er.indexOf('\n') + 1);
if(!er.isEmpty() && er.charAt(0) == '[')
er = er.replaceAll("\\[(.*?)\\] (.*)", "$1 $2");
// unexpected error - dump stack trace
}
// print compilation steps
if(compile) {
Util.errln("---------------------------------------------------------");
Util.err(xq.info());
Util.errln(in);
}
final Nodes expOut = nodes("*:output-file/text()", state);
final TokenList result = new TokenList();
for(int o = 0; o < expOut.size(); ++o) {
final String resFile = string(data.atom(expOut.list[o]));
final IOFile exp = new IOFile(expected + pth + resFile);
result.add(read(exp).replaceAll("\r\n|\r|\n", Prop.NL));
}
final Nodes cmpFiles = nodes("*:output-file/@compare", state);
boolean xml = false;
boolean frag = false;
boolean ignore = false;
for(int o = 0; o < cmpFiles.size(); ++o) {
final byte[] type = data.atom(cmpFiles.list[o]);
xml |= eq(type, XML);
frag |= eq(type, FRAGMENT);
ignore |= eq(type, IGNORE);
}
String expError = text("*:expected-error/text()", state);
final StringBuilder log = new StringBuilder(pth + inname + IO.XQSUFFIX);
if(!files.isEmpty()) log.append(" [").append(files).append(']');
log.append(NL);
/** Remove comments. */
log.append(norm(in)).append(NL);
final String logStr = log.toString();
// skip queries with variable results
final boolean print = currTime || !logStr.contains("current-");
boolean correctError = false;
if(er != null && (expOut.size() == 0 || !expError.isEmpty())) {
expError = error(pth + outname, expError);
final String code = er.substring(0, Math.min(8, er.length()));
for(final String e : SLASH.split(expError)) {
if(code.equals(e)) {
correctError = true;
break;
}
}
}
if(correctError) {
if(print) {
logOK.append(logStr);
logOK.append("[Right] ");
logOK.append(norm(er));
logOK.append(NL);
logOK.append(NL);
addLog(pth, outname + ".log", er);
}
++ok;
} else if(er == null) {
int s = -1;
final int rs = result.size();
while(!ignore && ++s < rs) {
inspect |= s < cmpFiles.list.length &&
eq(data.atom(cmpFiles.list[s]), INSPECT);
final String expect = string(result.get(s));
final String actual = ao.toString();
if(expect.equals(actual)) break;
if(xml || frag) {
iter.reset();
try {
final ValueBuilder vb = toIter(expect.replaceAll(
"^<\\?xml.*?\\?>", "").trim(), frag);
if(Compare.deep(iter, vb, null)) break;
vb.reset();
final ValueBuilder ia = toIter(actual, frag);
if(Compare.deep(ia, vb, null)) break;
} catch(final Throwable ex) {
Util.errln('\n' + outname + ':');
Util.stack(ex);
}
}
}
if((rs > 0 || !expError.isEmpty()) && s == rs && !inspect) {
if(print) {
if(expOut.size() == 0) result.add(error(pth + outname, expError));
logErr.append(logStr);
logErr.append('[' + testid + " ] ");
logErr.append(norm(string(result.get(0))));
logErr.append(NL);
logErr.append("[Wrong] ");
logErr.append(norm(ao.toString()));
logErr.append(NL);
logErr.append(NL);
addLog(pth, outname + (xml ? IO.XMLSUFFIX : ".txt"), ao.toString());
}
correct = false;
++err;
} else {
if(print) {
logOK.append(logStr);
logOK.append("[Right] ");
logOK.append(norm(ao.toString()));
logOK.append(NL);
logOK.append(NL);
addLog(pth, outname + (xml ? IO.XMLSUFFIX : ".txt"), ao.toString());
}
++ok;
}
} else {
if(expOut.size() == 0 || !expError.isEmpty()) {
if(print) {
logOK2.append(logStr);
logOK2.append('[' + testid + " ] ");
logOK2.append(norm(expError));
logOK2.append(NL);
logOK2.append("[Rght?] ");
logOK2.append(norm(er));
logOK2.append(NL);
logOK2.append(NL);
addLog(pth, outname + ".log", er);
}
++ok2;
} else {
if(print) {
logErr2.append(logStr);
logErr2.append('[' + testid + " ] ");
logErr2.append(norm(string(result.get(0))));
logErr2.append(NL);
logErr2.append("[Wrong] ");
logErr2.append(norm(er));
logErr2.append(NL);
logErr2.append(NL);
addLog(pth, outname + ".log", er);
}
correct = false;
++err2;
}
}
if(curr != null) Close.close(curr.data, context);
xq.close();
}
if(reporting) {
logReport.append(" <test-case name=\"");
logReport.append(outname);
logReport.append("\" result='");
logReport.append(correct ? "pass" : "fail");
if(inspect) logReport.append("' todo='inspect");
logReport.append("'/>");
logReport.append(NL);
}
// print verbose/timing information
final long nano = perf.time();
final boolean slow = nano / 1000000 > timer;
if(verbose) {
if(slow) Util.out(": " + Performance.getTime(nano, 1));
Util.outln();
} else if(slow) {
Util.out(NL + "- " + outname + ": " + Performance.getTime(nano, 1));
}
return single == null || !outname.equals(single);
}
/**
* Creates an item iterator for the given XML fragment.
* @param xml fragment
* @param frag fragment flag
* @return iterator
*/
private ValueBuilder toIter(final String xml, final boolean frag) {
final ValueBuilder it = new ValueBuilder();
try {
final String str = frag ? "<X>" + xml + "</X>" : xml;
final Data d = CreateDB.mainMem(IO.get(str), context);
for(int p = frag ? 2 : 0; p < d.meta.size; p += d.size(p, d.kind(p))) {
it.add(new DBNode(d, p));
}
} catch(final IOException ex) {
return new ValueBuilder(
new Item[] { Str.get(Long.toString(System.nanoTime())) }, 1);
}
return it;
}
/**
* Removes comments from the specified string.
* @param in input string
* @return result
*/
private String norm(final String in) {
return QueryProcessor.removeComments(in, maxout);
}
/**
* Initializes the input files, specified by the context nodes.
 * @param nod input files
 * @param var variables
 * @param qp query processor
 * @param first first-call flag
* @return string with input files
* @throws QueryException query exception
* @throws BaseXException database exception
*/
private byte[] file(final Nodes nod, final Nodes var, final QueryProcessor qp,
final boolean first) throws QueryException, BaseXException {
final TokenBuilder tb = new TokenBuilder();
for(int c = 0; c < nod.size(); ++c) {
final byte[] nm = data.atom(nod.list[c]);
String src = srcs.get(string(nm));
if(!tb.isEmpty()) tb.add(", ");
tb.add(nm);
Expr expr = null;
if(src == null) {
// assign collection
expr = coll(nm, qp);
} else {
// assign document
final String dbname = new IOFile(src).dbname();
Function def = Function.DOC;
// updates: drop updated document or open updated database
if(updating()) {
if(first) {
new DropDB(dbname).execute(context);
} else {
def = Function._DB_OPEN;
src = dbname;
}
}
expr = def.get(null, Str.get(src));
}
if(var != null) qp.bind(string(data.atom(var.list[c])), expr);
}
return tb.finish();
}
/**
* Assigns the nodes to the specified variables.
* @param nod nodes
* @param var variables
* @param qp query processor
* @throws QueryException query exception
*/
private void var(final Nodes nod, final Nodes var, final QueryProcessor qp)
throws QueryException {
for(int c = 0; c < nod.size(); ++c) {
final byte[] nm = data.atom(nod.list[c]);
final String src = srcs.get(string(nm));
final Item it = src == null ? coll(nm, qp) : Str.get(src);
qp.bind(string(data.atom(var.list[c])), it);
}
}
/**
* Assigns a collection.
* @param name collection name
* @param qp query processor
* @return expression
* @throws QueryException query exception
*/
private Uri coll(final byte[] name, final QueryProcessor qp) throws QueryException {
qp.ctx.resource.addCollection(string(name), colls.get(string(name)));
return Uri.uri(name);
}
/**
 * Evaluates the input queries and assigns the results to the specified
 * variables.
 * @param nod input queries
 * @param var variables
* @param pth file path
* @param qp query processor
* @throws QueryException query exception
*/
private void eval(final Nodes nod, final Nodes var, final String pth,
final QueryProcessor qp) throws QueryException {
for(int c = 0; c < nod.size(); ++c) {
final String file = pth + string(data.atom(nod.list[c])) + IO.XQSUFFIX;
final String in = read(new IOFile(queries + file));
final QueryProcessor xq = new QueryProcessor(in, context);
final Value val = xq.value();
qp.bind(string(data.atom(var.list[c])), val);
xq.close();
}
}
/**
* Adds a log file.
* @param pth file path
* @param nm file name
* @param msg message
* @throws IOException I/O exception
*/
private void addLog(final String pth, final String nm, final String msg)
throws IOException {
if(reporting) {
final File file = new File(results + pth);
if(!file.exists()) file.mkdirs();
final BufferedWriter bw = new BufferedWriter(new OutputStreamWriter(
new FileOutputStream(results + pth + nm), UTF8));
bw.write(msg);
bw.close();
}
}
/**
* Returns an error message.
* @param nm test name
* @param error XQTS error
* @return error message
*/
private String error(final String nm, final String error) {
final String error2 = expected + nm + ".log";
final IO file = new IOFile(error2);
return file.exists() ? error + '/' + read(file) : error;
}
/**
* Returns the resulting query text (text node or attribute value).
* @param qu query
* @param root root node
* @return attribute value
* @throws QueryException query exception
*/
protected String text(final String qu, final Nodes root) throws QueryException {
final Nodes n = nodes(qu, root);
final TokenBuilder tb = new TokenBuilder();
for(int i = 0; i < n.size(); ++i) {
if(i != 0) tb.add('/');
tb.add(data.atom(n.list[i]));
}
return tb.toString();
}
/**
* Returns the resulting query nodes.
* @param qu query
* @param root root node
* @return attribute value
* @throws QueryException query exception
*/
protected Nodes nodes(final String qu, final Nodes root) throws QueryException {
return new QueryProcessor(qu, context).context(root).queryNodes();
}
/**
* Adds the specified file to the writer.
* @param po writer
* @param f file path
* @throws IOException I/O exception
*/
private static void print(final PrintOutput po, final String f) throws IOException {
final BufferedReader br = new BufferedReader(new FileReader(f));
for(String line; (line = br.readLine()) != null;) po.println(line);
br.close();
}
/**
* Returns the contents of the specified file.
* @param f file to be read
* @return content
*/
private static String read(final IO f) {
try {
return string(new NewlineInput(f).content());
} catch(final IOException ex) {
Util.errln(ex);
return "";
}
}
/**
* Initializes the test.
* @param root root nodes reference
* @throws QueryException query exception
*/
@SuppressWarnings("unused")
protected void init(final Nodes root) throws QueryException { }
/**
* Performs test specific parsings.
* @param qp query processor
* @param root root nodes reference
* @throws QueryException query exception
*/
@SuppressWarnings("unused")
protected void parse(final QueryProcessor qp, final Nodes root)
throws QueryException { }
/**
* Returns all query states.
* @param root root node
* @return states
* @throws QueryException query exception
*/
@SuppressWarnings("unused")
protected Nodes states(final Nodes root) throws QueryException {
return root;
}
/**
* Updating flag.
* @return flag
*/
protected boolean updating() {
return false;
}
/**
* Parses the command-line arguments, specified by the user.
* @param args command-line arguments
* @throws IOException I/O exception
*/
protected final void parseArguments(final String[] args) throws IOException {
final Args arg = new Args(args, this,
" [options] [pat]" + NL +
" [pat] perform tests starting with a pattern" + NL +
" -c print compilation steps" + NL +
" -C run tests depending on current time" + NL +
" -g <test-group> test group to test" + NL +
" -h show this help" + NL +
" -m minimum conformance" + NL +
" -p change path" + NL +
" -r create report" + NL +
" -t[ms] list slowest queries" + NL +
" -v verbose output", Util.info(CONSOLE, Util.name(this)));
while(arg.more()) {
if(arg.dash()) {
final char c = arg.next();
if(c == 'r') {
reporting = true;
currTime = true;
} else if(c == 'C') {
currTime = true;
} else if(c == 'c') {
compile = true;
} else if(c == 'm') {
minimum = true;
} else if(c == 'g') {
group = arg.string();
} else if(c == 'p') {
path = arg.string() + '/';
} else if(c == 't') {
timer = arg.number();
} else if(c == 'v') {
verbose = true;
} else {
arg.usage();
}
} else {
single = arg.string();
maxout = Integer.MAX_VALUE;
}
}
}
}
| true | true | private boolean parse(final Nodes root) throws QueryException, IOException {
final String pth = text("@FilePath", root);
final String outname = text("@name", root);
if(single != null && !outname.startsWith(single)) return true;
final Performance perf = new Performance();
if(verbose) Util.out("- " + outname);
boolean inspect = false;
boolean correct = true;
final Nodes nodes = states(root);
for(int n = 0; n < nodes.size(); ++n) {
final Nodes state = new Nodes(nodes.list[n], nodes.data);
final String inname = text("*:query/@name", state);
final IOFile query = new IOFile(queries + pth + inname + IO.XQSUFFIX);
context.prop.set(Prop.QUERYPATH, query.path());
final String in = read(query);
String er = null;
ValueBuilder iter = null;
final Nodes cont = nodes("*:contextItem", state);
Nodes curr = null;
if(cont.size() != 0) {
final Data d = Check.check(context,
srcs.get(string(data.atom(cont.list[0]))));
curr = new Nodes(d.resources.docs().toArray(), d);
curr.root = true;
}
context.prop.set(Prop.QUERYINFO, compile);
final QueryProcessor xq = new QueryProcessor(in, context).context(curr);
context.prop.set(Prop.QUERYINFO, false);
final ArrayOutput ao = new ArrayOutput();
final TokenBuilder files = new TokenBuilder();
try {
files.add(file(nodes("*:input-file", state),
nodes("*:input-file/@variable", state), xq, n == 0));
files.add(file(nodes("*:defaultCollection", state), null, xq, n == 0));
var(nodes("*:input-URI", state), nodes("*:input-URI/@variable", state), xq);
eval(nodes("*:input-query/@name", state),
nodes("*:input-query/@variable", state), pth, xq);
parse(xq, state);
for(final int p : nodes("*:module", root).list) {
final String uri = text("@namespace", new Nodes(p, data));
final String file = IO.get(mods.get(string(data.atom(p))) + IO.XQSUFFIX).path();
xq.module(uri, file);
}
// evaluate query
iter = xq.value().cache();
// serialize query
final SerializerProp sp = new SerializerProp();
sp.set(SerializerProp.S_INDENT, NO);
final Serializer ser = Serializer.get(ao, sp);
for(Item it; (it = iter.next()) != null;) it.serialize(ser);
ser.close();
} catch(final Exception ex) {
if(!(ex instanceof QueryException || ex instanceof IOException)) {
Util.errln("\n*** " + outname + " ***");
Util.errln(in + '\n');
Util.stack(ex);
}
er = ex.getMessage();
if(er.startsWith(STOPPED_AT)) er = er.substring(er.indexOf('\n') + 1);
if(!er.isEmpty() && er.charAt(0) == '[')
er = er.replaceAll("\\[(.*?)\\] (.*)", "$1 $2");
// unexpected error - dump stack trace
}
// print compilation steps
if(compile) {
Util.errln("---------------------------------------------------------");
Util.err(xq.info());
Util.errln(in);
}
final Nodes expOut = nodes("*:output-file/text()", state);
final TokenList result = new TokenList();
for(int o = 0; o < expOut.size(); ++o) {
final String resFile = string(data.atom(expOut.list[o]));
final IOFile exp = new IOFile(expected + pth + resFile);
result.add(read(exp).replaceAll("\r\n|\r|\n", Prop.NL));
}
final Nodes cmpFiles = nodes("*:output-file/@compare", state);
boolean xml = false;
boolean frag = false;
boolean ignore = false;
for(int o = 0; o < cmpFiles.size(); ++o) {
final byte[] type = data.atom(cmpFiles.list[o]);
xml |= eq(type, XML);
frag |= eq(type, FRAGMENT);
ignore |= eq(type, IGNORE);
}
String expError = text("*:expected-error/text()", state);
final StringBuilder log = new StringBuilder(pth + inname + IO.XQSUFFIX);
if(!files.isEmpty()) log.append(" [").append(files).append(']');
log.append(NL);
/** Remove comments. */
log.append(norm(in)).append(NL);
final String logStr = log.toString();
// skip queries with variable results
final boolean print = currTime || !logStr.contains("current-");
boolean correctError = false;
if(er != null && (expOut.size() == 0 || !expError.isEmpty())) {
expError = error(pth + outname, expError);
final String code = er.substring(0, Math.min(8, er.length()));
for(final String e : SLASH.split(expError)) {
if(code.equals(e)) {
correctError = true;
break;
}
}
}
if(correctError) {
if(print) {
logOK.append(logStr);
logOK.append("[Right] ");
logOK.append(norm(er));
logOK.append(NL);
logOK.append(NL);
addLog(pth, outname + ".log", er);
}
++ok;
} else if(er == null) {
int s = -1;
final int rs = result.size();
while(!ignore && ++s < rs) {
inspect |= s < cmpFiles.list.length &&
eq(data.atom(cmpFiles.list[s]), INSPECT);
final String expect = string(result.get(s));
final String actual = ao.toString();
if(expect.equals(actual)) break;
if(xml || frag) {
iter.reset();
try {
final ValueBuilder vb = toIter(expect.replaceAll(
"^<\\?xml.*?\\?>", "").trim(), frag);
if(Compare.deep(iter, vb, null)) break;
vb.reset();
final ValueBuilder ia = toIter(actual, frag);
if(Compare.deep(ia, vb, null)) break;
} catch(final Throwable ex) {
Util.errln('\n' + outname + ':');
Util.stack(ex);
}
}
}
if((rs > 0 || !expError.isEmpty()) && s == rs && !inspect) {
if(print) {
if(expOut.size() == 0) result.add(error(pth + outname, expError));
logErr.append(logStr);
logErr.append('[' + testid + " ] ");
logErr.append(norm(string(result.get(0))));
logErr.append(NL);
logErr.append("[Wrong] ");
logErr.append(norm(ao.toString()));
logErr.append(NL);
logErr.append(NL);
addLog(pth, outname + (xml ? IO.XMLSUFFIX : ".txt"), ao.toString());
}
correct = false;
++err;
} else {
if(print) {
logOK.append(logStr);
logOK.append("[Right] ");
logOK.append(norm(ao.toString()));
logOK.append(NL);
logOK.append(NL);
addLog(pth, outname + (xml ? IO.XMLSUFFIX : ".txt"), ao.toString());
}
++ok;
}
} else {
if(expOut.size() == 0 || !expError.isEmpty()) {
if(print) {
logOK2.append(logStr);
logOK2.append('[' + testid + " ] ");
logOK2.append(norm(expError));
logOK2.append(NL);
logOK2.append("[Rght?] ");
logOK2.append(norm(er));
logOK2.append(NL);
logOK2.append(NL);
addLog(pth, outname + ".log", er);
}
++ok2;
} else {
if(print) {
logErr2.append(logStr);
logErr2.append('[' + testid + " ] ");
logErr2.append(norm(string(result.get(0))));
logErr2.append(NL);
logErr2.append("[Wrong] ");
logErr2.append(norm(er));
logErr2.append(NL);
logErr2.append(NL);
addLog(pth, outname + ".log", er);
}
correct = false;
++err2;
}
}
if(curr != null) Close.close(curr.data, context);
xq.close();
}
if(reporting) {
logReport.append(" <test-case name=\"");
logReport.append(outname);
logReport.append("\" result='");
logReport.append(correct ? "pass" : "fail");
if(inspect) logReport.append("' todo='inspect");
logReport.append("'/>");
logReport.append(NL);
}
// print verbose/timing information
final long nano = perf.time();
final boolean slow = nano / 1000000 > timer;
if(verbose) {
if(slow) Util.out(": " + Performance.getTime(nano, 1));
Util.outln();
} else if(slow) {
Util.out(NL + "- " + outname + ": " + Performance.getTime(nano, 1));
}
return single == null || !outname.equals(single);
}
| private boolean parse(final Nodes root) throws QueryException, IOException {
final String pth = text("@FilePath", root);
final String outname = text("@name", root);
if(single != null && !outname.startsWith(single)) return true;
final Performance perf = new Performance();
if(verbose) Util.out("- " + outname);
boolean inspect = false;
boolean correct = true;
final Nodes nodes = states(root);
for(int n = 0; n < nodes.size(); ++n) {
final Nodes state = new Nodes(nodes.list[n], nodes.data);
final String inname = text("*:query/@name", state);
final IOFile query = new IOFile(queries + pth + inname + IO.XQSUFFIX);
context.prop.set(Prop.QUERYPATH, query.path());
final String in = read(query);
String er = null;
ValueBuilder iter = null;
final Nodes cont = nodes("*:contextItem", state);
Nodes curr = null;
if(cont.size() != 0) {
final String p = srcs.get(string(data.atom(cont.list[0])));
final Data d = Check.check(context, p, null);
curr = new Nodes(d.resources.docs().toArray(), d);
curr.root = true;
}
context.prop.set(Prop.QUERYINFO, compile);
final QueryProcessor xq = new QueryProcessor(in, context).context(curr);
context.prop.set(Prop.QUERYINFO, false);
final ArrayOutput ao = new ArrayOutput();
final TokenBuilder files = new TokenBuilder();
try {
files.add(file(nodes("*:input-file", state),
nodes("*:input-file/@variable", state), xq, n == 0));
files.add(file(nodes("*:defaultCollection", state), null, xq, n == 0));
var(nodes("*:input-URI", state), nodes("*:input-URI/@variable", state), xq);
eval(nodes("*:input-query/@name", state),
nodes("*:input-query/@variable", state), pth, xq);
parse(xq, state);
for(final int p : nodes("*:module", root).list) {
final String uri = text("@namespace", new Nodes(p, data));
final String file = IO.get(mods.get(string(data.atom(p))) + IO.XQSUFFIX).path();
xq.module(uri, file);
}
// evaluate query
iter = xq.value().cache();
// serialize query
final SerializerProp sp = new SerializerProp();
sp.set(SerializerProp.S_INDENT, NO);
final Serializer ser = Serializer.get(ao, sp);
for(Item it; (it = iter.next()) != null;) it.serialize(ser);
ser.close();
} catch(final Exception ex) {
if(!(ex instanceof QueryException || ex instanceof IOException)) {
Util.errln("\n*** " + outname + " ***");
Util.errln(in + '\n');
Util.stack(ex);
}
er = ex.getMessage();
if(er.startsWith(STOPPED_AT)) er = er.substring(er.indexOf('\n') + 1);
if(!er.isEmpty() && er.charAt(0) == '[')
er = er.replaceAll("\\[(.*?)\\] (.*)", "$1 $2");
// unexpected error - dump stack trace
}
// print compilation steps
if(compile) {
Util.errln("---------------------------------------------------------");
Util.err(xq.info());
Util.errln(in);
}
final Nodes expOut = nodes("*:output-file/text()", state);
final TokenList result = new TokenList();
for(int o = 0; o < expOut.size(); ++o) {
final String resFile = string(data.atom(expOut.list[o]));
final IOFile exp = new IOFile(expected + pth + resFile);
result.add(read(exp).replaceAll("\r\n|\r|\n", Prop.NL));
}
final Nodes cmpFiles = nodes("*:output-file/@compare", state);
boolean xml = false;
boolean frag = false;
boolean ignore = false;
for(int o = 0; o < cmpFiles.size(); ++o) {
final byte[] type = data.atom(cmpFiles.list[o]);
xml |= eq(type, XML);
frag |= eq(type, FRAGMENT);
ignore |= eq(type, IGNORE);
}
String expError = text("*:expected-error/text()", state);
final StringBuilder log = new StringBuilder(pth + inname + IO.XQSUFFIX);
if(!files.isEmpty()) log.append(" [").append(files).append(']');
log.append(NL);
// remove comments
log.append(norm(in)).append(NL);
final String logStr = log.toString();
// skip queries with variable results
final boolean print = currTime || !logStr.contains("current-");
boolean correctError = false;
if(er != null && (expOut.size() == 0 || !expError.isEmpty())) {
expError = error(pth + outname, expError);
final String code = er.substring(0, Math.min(8, er.length()));
for(final String e : SLASH.split(expError)) {
if(code.equals(e)) {
correctError = true;
break;
}
}
}
if(correctError) {
if(print) {
logOK.append(logStr);
logOK.append("[Right] ");
logOK.append(norm(er));
logOK.append(NL);
logOK.append(NL);
addLog(pth, outname + ".log", er);
}
++ok;
} else if(er == null) {
int s = -1;
final int rs = result.size();
while(!ignore && ++s < rs) {
inspect |= s < cmpFiles.list.length &&
eq(data.atom(cmpFiles.list[s]), INSPECT);
final String expect = string(result.get(s));
final String actual = ao.toString();
if(expect.equals(actual)) break;
if(xml || frag) {
iter.reset();
try {
final ValueBuilder vb = toIter(expect.replaceAll(
"^<\\?xml.*?\\?>", "").trim(), frag);
if(Compare.deep(iter, vb, null)) break;
vb.reset();
final ValueBuilder ia = toIter(actual, frag);
if(Compare.deep(ia, vb, null)) break;
} catch(final Throwable ex) {
Util.errln('\n' + outname + ':');
Util.stack(ex);
}
}
}
if((rs > 0 || !expError.isEmpty()) && s == rs && !inspect) {
if(print) {
if(expOut.size() == 0) result.add(error(pth + outname, expError));
logErr.append(logStr);
logErr.append('[' + testid + " ] ");
logErr.append(norm(string(result.get(0))));
logErr.append(NL);
logErr.append("[Wrong] ");
logErr.append(norm(ao.toString()));
logErr.append(NL);
logErr.append(NL);
addLog(pth, outname + (xml ? IO.XMLSUFFIX : ".txt"), ao.toString());
}
correct = false;
++err;
} else {
if(print) {
logOK.append(logStr);
logOK.append("[Right] ");
logOK.append(norm(ao.toString()));
logOK.append(NL);
logOK.append(NL);
addLog(pth, outname + (xml ? IO.XMLSUFFIX : ".txt"), ao.toString());
}
++ok;
}
} else {
if(expOut.size() == 0 || !expError.isEmpty()) {
if(print) {
logOK2.append(logStr);
logOK2.append('[' + testid + " ] ");
logOK2.append(norm(expError));
logOK2.append(NL);
logOK2.append("[Rght?] ");
logOK2.append(norm(er));
logOK2.append(NL);
logOK2.append(NL);
addLog(pth, outname + ".log", er);
}
++ok2;
} else {
if(print) {
logErr2.append(logStr);
logErr2.append('[' + testid + " ] ");
logErr2.append(norm(string(result.get(0))));
logErr2.append(NL);
logErr2.append("[Wrong] ");
logErr2.append(norm(er));
logErr2.append(NL);
logErr2.append(NL);
addLog(pth, outname + ".log", er);
}
correct = false;
++err2;
}
}
if(curr != null) Close.close(curr.data, context);
xq.close();
}
if(reporting) {
logReport.append(" <test-case name=\"");
logReport.append(outname);
logReport.append("\" result='");
logReport.append(correct ? "pass" : "fail");
if(inspect) logReport.append("' todo='inspect");
logReport.append("'/>");
logReport.append(NL);
}
// print verbose/timing information
final long nano = perf.time();
final boolean slow = nano / 1000000 > timer;
if(verbose) {
if(slow) Util.out(": " + Performance.getTime(nano, 1));
Util.outln();
} else if(slow) {
Util.out(NL + "- " + outname + ": " + Performance.getTime(nano, 1));
}
return single == null || !outname.equals(single);
}
|
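The harness above treats the first eight characters of a reported error as its code and accepts the test when that code appears in the slash-separated expected-error list. A minimal stand-alone sketch of that comparison (the SLASH pattern and the sample codes are assumptions for illustration, not the harness's actual constants):

import java.util.regex.Pattern;

public class ErrorCodeMatch {
    // stand-in for the harness's SLASH pattern, assumed to split on '/'
    private static final Pattern SLASH = Pattern.compile("/");

    /** True when the reported message starts with one of the expected codes. */
    static boolean correctError(String reported, String expectedCodes) {
        // mirrors er.substring(0, Math.min(8, er.length())) in the harness
        String code = reported.substring(0, Math.min(8, reported.length()));
        for (String e : SLASH.split(expectedCodes)) {
            if (code.equals(e)) return true;
        }
        return false;
    }

    public static void main(String[] args) {
        System.out.println(correctError("FORG0001 Invalid cast", "FORG0001/XPTY0004")); // true
        System.out.println(correctError("XPST0003 syntax error", "FORG0001"));          // false
    }
}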
diff --git a/src/main/java/net/aufdemrand/denizen/utilities/GetRequirements.java b/src/main/java/net/aufdemrand/denizen/utilities/GetRequirements.java
index da4825646..f37ba8b0c 100644
--- a/src/main/java/net/aufdemrand/denizen/utilities/GetRequirements.java
+++ b/src/main/java/net/aufdemrand/denizen/utilities/GetRequirements.java
@@ -1,223 +1,223 @@
package net.aufdemrand.denizen.utilities;
import java.util.LinkedList;
import java.util.List;
import java.util.logging.Level;
import net.aufdemrand.denizen.Denizen;
import net.aufdemrand.denizen.command.core.FailCommand;
import net.aufdemrand.denizen.command.core.FinishCommand;
import net.citizensnpcs.command.exception.RequirementMissingException;
import org.bukkit.Bukkit;
import org.bukkit.entity.LivingEntity;
import org.bukkit.entity.Player;
public class GetRequirements {
public enum RequirementMode {
NONE, ALL, ANY
}
public enum Requirement {
NONE, NAME, WEARING, ITEM, HOLDING, TIME, PRECIPITATION, ACTIVITY, FINISHED, SCRIPT, FAILED,
STORMY, SUNNY, HUNGER, WORLD, PERMISSION, LEVEL, GROUP, MONEY, POTIONEFFECT, PRECIPITATING,
STORMING, DURABILITY
}
private Denizen plugin;
public GetRequirements(Denizen denizen) {
plugin = denizen;
}
public boolean check(String theScript, LivingEntity theEntity, boolean isPlayer) throws RequirementMissingException {
String requirementMode = plugin.getScripts().getString(theScript + ".Requirements.Mode");
List<String> requirementList = plugin.getScripts().getStringList(theScript + ".Requirements.List");
/* No requirements met yet, we just started! */
int numberMet = 0;
boolean negativeRequirement;
/* Requirements list null? This script is probably named wrong, or doesn't exist! */
- if (requirementList == null) {
+ if (requirementList == null || requirementMode == null) {
if (plugin.debugMode) plugin.getLogger().log(Level.INFO, "...no requirements found! This script may be named incorrectly, or simply doesn't exist!");
return false;
}
/* Requirement node "NONE"? No requirements in the LIST? No need to continue, return TRUE */
if (requirementMode.equals("NONE") || requirementList.isEmpty()) return true;
for (String requirementEntry : requirementList) {
/* Check if this is a Negative Requirement */
if (requirementEntry.startsWith("-")) {
negativeRequirement = true;
requirementEntry = requirementEntry.substring(1);
}
else negativeRequirement = false;
String[] arguments = new String[25];
String[] argumentPopulator = requirementEntry.split(" ");
for (int count = 0; count < 25; count++) {
if (argumentPopulator.length > count) arguments[count] = argumentPopulator[count];
else arguments[count] = null;
}
try {
switch (Requirement.valueOf(arguments[0].toUpperCase())) {
case NONE:
return true;
case TIME: // (-)TIME [DAWN|DAY|DUSK|NIGHT] or (-)TIME [#] [#]
if (plugin.getWorld.checkTime(theEntity.getWorld(), arguments[1], arguments[2], negativeRequirement)) numberMet++;
break;
case STORMING: case STORMY: case PRECIPITATING: case PRECIPITATION: // (-)PRECIPITATION
if (plugin.getWorld.checkWeather(theEntity.getWorld(), "PRECIPITATION", negativeRequirement)) numberMet++;
break;
case SUNNY: // (-)SUNNY
if (plugin.getWorld.checkWeather(theEntity.getWorld(), "SUNNY", negativeRequirement)) numberMet++;
break;
case HUNGER: // (-)HUNGER [FULL|HUNGRY|STARVING]
if (plugin.getPlayer.checkSaturation((Player) theEntity, arguments[1], negativeRequirement)) numberMet++;
break;
case LEVEL: // (-)LEVEL [#] (#)
if (plugin.getPlayer.checkLevel((Player) theEntity, arguments[1], arguments[2], negativeRequirement)) numberMet++;
break;
case WORLD: // (-)WORLD [List of Worlds]
List<String> theWorlds = new LinkedList<String>(); // = Arrays.asList(arguments);
for(String arg : arguments) if (arg != null) theWorlds.add(arg.toUpperCase());
theWorlds.remove(0); /* Remove the command from the list */
if (plugin.getWorld.checkWorld(theEntity, theWorlds, negativeRequirement)) numberMet++;
break;
case NAME: // (-)Name [List of Names]
List<String> theNames = new LinkedList<String>(); // = Arrays.asList(arguments);
for(String arg : arguments) if (arg != null) theNames.add(arg.toUpperCase());
theNames.remove(0); /* Remove the command from the list */
if (plugin.getPlayer.checkName((Player) theEntity, theNames, negativeRequirement)) numberMet++;
break;
case MONEY: // (-)MONEY [# or more]
if (plugin.getPlayer.checkFunds((Player) theEntity, arguments[1], negativeRequirement)) numberMet++;
break;
case ITEM: // (-)ITEM [ITEM_NAME|#:#] (# or more)
String[] itemArgs = splitItem(arguments[1]);
if (plugin.getPlayer.checkInventory((Player) theEntity, itemArgs[0], itemArgs[1], arguments[2], negativeRequirement)) numberMet++;
break;
case HOLDING: // (-)HOLDING [ITEM_NAME|#:#] (# or more)
String[] holdingArgs = splitItem(arguments[1]);
if (plugin.getPlayer.checkHand((Player) theEntity, holdingArgs[0], holdingArgs[1], arguments[2], negativeRequirement)) numberMet++;
break;
case WEARING: // (-) WEARING [ITEM_NAME|#]
if (plugin.getPlayer.checkArmor((Player) theEntity, arguments[1], negativeRequirement)) numberMet++;
break;
case POTIONEFFECT: // (-)POTIONEFFECT [List of POTION_TYPEs]
List<String> thePotions = new LinkedList<String>(); // = Arrays.asList(arguments);
for(String arg : arguments) if (arg != null) thePotions.add(arg.toUpperCase());
thePotions.remove(0); /* Remove the command from the list */
if (plugin.getPlayer.checkEffects((Player) theEntity, thePotions, negativeRequirement)) numberMet++;
break;
case FINISHED:
case SCRIPT: // (-)FINISHED (#) [Script Name]
if (plugin.getCommandRegistry().getCommand(FinishCommand.class).getScriptCompletes((Player) theEntity, requirementEntry.split(" ", 2)[1], requirementEntry.split(" ", 3)[1], negativeRequirement)) numberMet++;
break;
case FAILED: // (-)FAILED [Script Name]
if (plugin.getCommandRegistry().getCommand(FailCommand.class).getScriptFail((Player) theEntity, requirementEntry.split(" ", 2)[1], negativeRequirement)) numberMet++;
break;
case GROUP:
List<String> theGroups = new LinkedList<String>(); // = Arrays.asList(arguments);
for(String arg : arguments) if (arg != null) theGroups.add(arg);
theGroups.remove(0); /* Remove the command from the list */
if (plugin.getPlayer.checkGroups((Player) theEntity, theGroups, negativeRequirement)) numberMet++;
break;
case PERMISSION: // (-)PERMISSION [this.permission.node]
List<String> thePermissions = new LinkedList<String>(); // = Arrays.asList(arguments);
for(String arg : arguments) if (arg != null) thePermissions.add(arg);
thePermissions.remove(0); /* Remove the command from the list */
if (plugin.getPlayer.checkPermissions((Player) theEntity, thePermissions, negativeRequirement)) numberMet++;
break;
case DURABILITY: // (-)DURABILITY [>,<,=] [#|#%]
if (plugin.getPlayer.checkDurability((Player) theEntity, arguments[1], arguments[2], negativeRequirement)) numberMet++;
break;
}
} catch (Throwable e) {
if (plugin.showStackTraces) plugin.getLogger().info(e.getMessage());
if (plugin.showStackTraces) e.printStackTrace();
throw new RequirementMissingException(e.getMessage());
}
}
/* Check numberMet */
if (requirementMode.equalsIgnoreCase("ALL")
&& numberMet == requirementList.size()) return true;
String[] ModeArgs = requirementMode.split(" ");
if (ModeArgs[0].equalsIgnoreCase("ANY")
&& numberMet >= Integer.parseInt(ModeArgs[1])) return true;
/* Nothing met, return FALSE */
return false;
}
/*
* Converts a string with the format #:# (TypeId:Data) to a String[]
*
* Element [0] -- TypeId
* Element [1] -- Data
*/
public String[] splitItem(String theItemWithData) {
String[] itemArgs = new String[2];
if (theItemWithData.split(":", 2).length == 1) {
itemArgs[0] = theItemWithData;
itemArgs[1] = null;
}
else {
itemArgs[0] = theItemWithData.split(":", 2)[0];
itemArgs[1] = theItemWithData.split(":", 2)[1];
}
return itemArgs;
}
}
| true | true | public boolean check(String theScript, LivingEntity theEntity, boolean isPlayer) throws RequirementMissingException {
String requirementMode = plugin.getScripts().getString(theScript + ".Requirements.Mode");
List<String> requirementList = plugin.getScripts().getStringList(theScript + ".Requirements.List");
/* No requirements met yet, we just started! */
int numberMet = 0;
boolean negativeRequirement;
/* Requirements list null? This script is probably named wrong, or doesn't exist! */
if (requirementList == null) {
if (plugin.debugMode) plugin.getLogger().log(Level.INFO, "...no requirements found! This script may be named incorrectly, or simply doesn't exist!");
return false;
}
/* Requirement node "NONE"? No requirements in the LIST? No need to continue, return TRUE */
if (requirementMode.equals("NONE") || requirementList.isEmpty()) return true;
for (String requirementEntry : requirementList) {
/* Check if this is a Negative Requirement */
if (requirementEntry.startsWith("-")) {
negativeRequirement = true;
requirementEntry = requirementEntry.substring(1);
}
else negativeRequirement = false;
String[] arguments = new String[25];
String[] argumentPopulator = requirementEntry.split(" ");
for (int count = 0; count < 25; count++) {
if (argumentPopulator.length > count) arguments[count] = argumentPopulator[count];
else arguments[count] = null;
}
try {
switch (Requirement.valueOf(arguments[0].toUpperCase())) {
case NONE:
return true;
case TIME: // (-)TIME [DAWN|DAY|DUSK|NIGHT] or (-)TIME [#] [#]
if (plugin.getWorld.checkTime(theEntity.getWorld(), arguments[1], arguments[2], negativeRequirement)) numberMet++;
break;
case STORMING: case STORMY: case PRECIPITATING: case PRECIPITATION: // (-)PRECIPITATION
if (plugin.getWorld.checkWeather(theEntity.getWorld(), "PRECIPITATION", negativeRequirement)) numberMet++;
break;
case SUNNY: // (-)SUNNY
if (plugin.getWorld.checkWeather(theEntity.getWorld(), "SUNNY", negativeRequirement)) numberMet++;
break;
case HUNGER: // (-)HUNGER [FULL|HUNGRY|STARVING]
if (plugin.getPlayer.checkSaturation((Player) theEntity, arguments[1], negativeRequirement)) numberMet++;
break;
case LEVEL: // (-)LEVEL [#] (#)
if (plugin.getPlayer.checkLevel((Player) theEntity, arguments[1], arguments[2], negativeRequirement)) numberMet++;
break;
case WORLD: // (-)WORLD [List of Worlds]
List<String> theWorlds = new LinkedList<String>(); // = Arrays.asList(arguments);
for(String arg : arguments) if (arg != null) theWorlds.add(arg.toUpperCase());
theWorlds.remove(0); /* Remove the command from the list */
if (plugin.getWorld.checkWorld(theEntity, theWorlds, negativeRequirement)) numberMet++;
break;
case NAME: // (-)Name [List of Names]
List<String> theNames = new LinkedList<String>(); // = Arrays.asList(arguments);
for(String arg : arguments) if (arg != null) theNames.add(arg.toUpperCase());
theNames.remove(0); /* Remove the command from the list */
if (plugin.getPlayer.checkName((Player) theEntity, theNames, negativeRequirement)) numberMet++;
break;
case MONEY: // (-)MONEY [# or more]
if (plugin.getPlayer.checkFunds((Player) theEntity, arguments[1], negativeRequirement)) numberMet++;
break;
case ITEM: // (-)ITEM [ITEM_NAME|#:#] (# or more)
String[] itemArgs = splitItem(arguments[1]);
if (plugin.getPlayer.checkInventory((Player) theEntity, itemArgs[0], itemArgs[1], arguments[2], negativeRequirement)) numberMet++;
break;
case HOLDING: // (-)HOLDING [ITEM_NAME|#:#] (# or more)
String[] holdingArgs = splitItem(arguments[1]);
if (plugin.getPlayer.checkHand((Player) theEntity, holdingArgs[0], holdingArgs[1], arguments[2], negativeRequirement)) numberMet++;
break;
case WEARING: // (-) WEARING [ITEM_NAME|#]
if (plugin.getPlayer.checkArmor((Player) theEntity, arguments[1], negativeRequirement)) numberMet++;
break;
case POTIONEFFECT: // (-)POTIONEFFECT [List of POTION_TYPEs]
List<String> thePotions = new LinkedList<String>(); // = Arrays.asList(arguments);
for(String arg : arguments) if (arg != null) thePotions.add(arg.toUpperCase());
thePotions.remove(0); /* Remove the command from the list */
if (plugin.getPlayer.checkEffects((Player) theEntity, thePotions, negativeRequirement)) numberMet++;
break;
case FINISHED:
case SCRIPT: // (-)FINISHED (#) [Script Name]
if (plugin.getCommandRegistry().getCommand(FinishCommand.class).getScriptCompletes((Player) theEntity, requirementEntry.split(" ", 2)[1], requirementEntry.split(" ", 3)[1], negativeRequirement)) numberMet++;
break;
case FAILED: // (-)FAILED [Script Name]
if (plugin.getCommandRegistry().getCommand(FailCommand.class).getScriptFail((Player) theEntity, requirementEntry.split(" ", 2)[1], negativeRequirement)) numberMet++;
break;
case GROUP:
List<String> theGroups = new LinkedList<String>(); // = Arrays.asList(arguments);
for(String arg : arguments) if (arg != null) theGroups.add(arg);
theGroups.remove(0); /* Remove the command from the list */
if (plugin.getPlayer.checkGroups((Player) theEntity, theGroups, negativeRequirement)) numberMet++;
break;
case PERMISSION: // (-)PERMISSION [this.permission.node]
List<String> thePermissions = new LinkedList<String>(); // = Arrays.asList(arguments);
for(String arg : arguments) if (arg != null) thePermissions.add(arg);
thePermissions.remove(0); /* Remove the command from the list */
if (plugin.getPlayer.checkPermissions((Player) theEntity, thePermissions, negativeRequirement)) numberMet++;
break;
case DURABILITY: // (-)DURABILITY [>,<,=] [#|#%]
if (plugin.getPlayer.checkDurability((Player) theEntity, arguments[1], arguments[2], negativeRequirement)) numberMet++;
break;
}
} catch (Throwable e) {
if (plugin.showStackTraces) plugin.getLogger().info(e.getMessage());
if (plugin.showStackTraces) e.printStackTrace();
throw new RequirementMissingException(e.getMessage());
}
}
/* Check numberMet */
if (requirementMode.equalsIgnoreCase("ALL")
&& numberMet == requirementList.size()) return true;
String[] ModeArgs = requirementMode.split(" ");
if (ModeArgs[0].equalsIgnoreCase("ANY")
&& numberMet >= Integer.parseInt(ModeArgs[1])) return true;
/* Nothing met, return FALSE */
return false;
}
| public boolean check(String theScript, LivingEntity theEntity, boolean isPlayer) throws RequirementMissingException {
String requirementMode = plugin.getScripts().getString(theScript + ".Requirements.Mode");
List<String> requirementList = plugin.getScripts().getStringList(theScript + ".Requirements.List");
/* No requirements met yet, we just started! */
int numberMet = 0;
boolean negativeRequirement;
/* Requirements list null? This script is probably named wrong, or doesn't exist! */
if (requirementList == null || requirementMode == null) {
if (plugin.debugMode) plugin.getLogger().log(Level.INFO, "...no requirements found! This script may be named incorrectly, or simply doesn't exist!");
return false;
}
/* Requirement node "NONE"? No requirements in the LIST? No need to continue, return TRUE */
if (requirementMode.equals("NONE") || requirementList.isEmpty()) return true;
for (String requirementEntry : requirementList) {
/* Check if this is a Negative Requirement */
if (requirementEntry.startsWith("-")) {
negativeRequirement = true;
requirementEntry = requirementEntry.substring(1);
}
else negativeRequirement = false;
String[] arguments = new String[25];
String[] argumentPopulator = requirementEntry.split(" ");
for (int count = 0; count < 25; count++) {
if (argumentPopulator.length > count) arguments[count] = argumentPopulator[count];
else arguments[count] = null;
}
try {
switch (Requirement.valueOf(arguments[0].toUpperCase())) {
case NONE:
return true;
case TIME: // (-)TIME [DAWN|DAY|DUSK|NIGHT] or (-)TIME [#] [#]
if (plugin.getWorld.checkTime(theEntity.getWorld(), arguments[1], arguments[2], negativeRequirement)) numberMet++;
break;
case STORMING: case STORMY: case PRECIPITATING: case PRECIPITATION: // (-)PRECIPITATION
if (plugin.getWorld.checkWeather(theEntity.getWorld(), "PRECIPITATION", negativeRequirement)) numberMet++;
break;
case SUNNY: // (-)SUNNY
if (plugin.getWorld.checkWeather(theEntity.getWorld(), "SUNNY", negativeRequirement)) numberMet++;
break;
case HUNGER: // (-)HUNGER [FULL|HUNGRY|STARVING]
if (plugin.getPlayer.checkSaturation((Player) theEntity, arguments[1], negativeRequirement)) numberMet++;
break;
case LEVEL: // (-)LEVEL [#] (#)
if (plugin.getPlayer.checkLevel((Player) theEntity, arguments[1], arguments[2], negativeRequirement)) numberMet++;
break;
case WORLD: // (-)WORLD [List of Worlds]
List<String> theWorlds = new LinkedList<String>(); // = Arrays.asList(arguments);
for(String arg : arguments) if (arg != null) theWorlds.add(arg.toUpperCase());
theWorlds.remove(0); /* Remove the command from the list */
if (plugin.getWorld.checkWorld(theEntity, theWorlds, negativeRequirement)) numberMet++;
break;
case NAME: // (-)Name [List of Names]
List<String> theNames = new LinkedList<String>(); // = Arrays.asList(arguments);
for(String arg : arguments) if (arg != null) theNames.add(arg.toUpperCase());
theNames.remove(0); /* Remove the command from the list */
if (plugin.getPlayer.checkName((Player) theEntity, theNames, negativeRequirement)) numberMet++;
break;
case MONEY: // (-)MONEY [# or more]
if (plugin.getPlayer.checkFunds((Player) theEntity, arguments[1], negativeRequirement)) numberMet++;
break;
case ITEM: // (-)ITEM [ITEM_NAME|#:#] (# or more)
String[] itemArgs = splitItem(arguments[1]);
if (plugin.getPlayer.checkInventory((Player) theEntity, itemArgs[0], itemArgs[1], arguments[2], negativeRequirement)) numberMet++;
break;
case HOLDING: // (-)HOLDING [ITEM_NAME|#:#] (# or more)
String[] holdingArgs = splitItem(arguments[1]);
if (plugin.getPlayer.checkHand((Player) theEntity, holdingArgs[0], holdingArgs[1], arguments[2], negativeRequirement)) numberMet++;
break;
case WEARING: // (-) WEARING [ITEM_NAME|#]
if (plugin.getPlayer.checkArmor((Player) theEntity, arguments[1], negativeRequirement)) numberMet++;
break;
case POTIONEFFECT: // (-)POTIONEFFECT [List of POTION_TYPEs]
List<String> thePotions = new LinkedList<String>(); // = Arrays.asList(arguments);
for(String arg : arguments) if (arg != null) thePotions.add(arg.toUpperCase());
thePotions.remove(0); /* Remove the command from the list */
if (plugin.getPlayer.checkEffects((Player) theEntity, thePotions, negativeRequirement)) numberMet++;
break;
case FINISHED:
case SCRIPT: // (-)FINISHED (#) [Script Name]
if (plugin.getCommandRegistry().getCommand(FinishCommand.class).getScriptCompletes((Player) theEntity, requirementEntry.split(" ", 2)[1], requirementEntry.split(" ", 3)[1], negativeRequirement)) numberMet++;
break;
case FAILED: // (-)FAILED [Script Name]
if (plugin.getCommandRegistry().getCommand(FailCommand.class).getScriptFail((Player) theEntity, requirementEntry.split(" ", 2)[1], negativeRequirement)) numberMet++;
break;
case GROUP:
List<String> theGroups = new LinkedList<String>(); // = Arrays.asList(arguments);
for(String arg : arguments) if (arg != null) theGroups.add(arg);
theGroups.remove(0); /* Remove the command from the list */
if (plugin.getPlayer.checkGroups((Player) theEntity, theGroups, negativeRequirement)) numberMet++;
break;
case PERMISSION: // (-)PERMISSION [this.permission.node]
List<String> thePermissions = new LinkedList<String>(); // = Arrays.asList(arguments);
for(String arg : arguments) if (arg != null) thePermissions.add(arg);
thePermissions.remove(0); /* Remove the command from the list */
if (plugin.getPlayer.checkPermissions((Player) theEntity, thePermissions, negativeRequirement)) numberMet++;
break;
case DURABILITY: // (-)DURABILITY [>,<,=] [#|#%]
if (plugin.getPlayer.checkDurability((Player) theEntity, arguments[1], arguments[2], negativeRequirement)) numberMet++;
break;
}
} catch (Throwable e) {
if (plugin.showStackTraces) plugin.getLogger().info(e.getMessage());
if (plugin.showStackTraces) e.printStackTrace();
throw new RequirementMissingException(e.getMessage());
}
}
/* Check numberMet */
if (requirementMode.equalsIgnoreCase("ALL")
&& numberMet == requirementList.size()) return true;
String[] ModeArgs = requirementMode.split(" ");
if (ModeArgs[0].equalsIgnoreCase("ANY")
&& numberMet >= Integer.parseInt(ModeArgs[1])) return true;
/* Nothing met, return FALSE */
return false;
}
|
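The requirement checker above counts how many entries are met and then interprets the mode string: ALL requires every entry, while a mode such as "ANY 2" requires at least the given number. A small sketch of that final decision step (the helper name and the sample inputs are illustrative, not part of the plugin):

public class RequirementModeDemo {
    /** Decides pass/fail from a mode string and the number of requirements met. */
    static boolean passes(String mode, int numberMet, int total) {
        if (mode.equalsIgnoreCase("ALL")) return numberMet == total;
        String[] modeArgs = mode.split(" ");
        if (modeArgs[0].equalsIgnoreCase("ANY"))
            return numberMet >= Integer.parseInt(modeArgs[1]);
        // unrecognized mode: nothing met, same fall-through as the original
        return false;
    }

    public static void main(String[] args) {
        System.out.println(passes("ALL", 3, 3));   // true
        System.out.println(passes("ANY 2", 2, 5)); // true
        System.out.println(passes("ANY 2", 1, 5)); // false
    }
}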
diff --git a/src/dgu/bufsizing/control/EventProcessor.java b/src/dgu/bufsizing/control/EventProcessor.java
index 17e1569..4bda173 100644
--- a/src/dgu/bufsizing/control/EventProcessor.java
+++ b/src/dgu/bufsizing/control/EventProcessor.java
@@ -1,199 +1,202 @@
package dgu.bufsizing.control;
import dgu.bufsizing.BottleneckLink;
import dgu.bufsizing.DemoGUI;
import java.io.IOException;
import java.net.DatagramPacket;
import java.net.DatagramSocket;
import java.net.SocketException;
/**
* Process event capture packets.
* @author David Underhill
*/
public class EventProcessor extends Thread {
/** default event capture port */
public static final int DEFAULT_EVCAP_PORT = 27033;
/** maximum length of a datagram */
private static final int MAX_PACKET_LEN = 1500;
/** number of queues in the header */
private static final int NUM_QUEUES = 8;
/** the port to listen on for event capture packets */
private int port;
/** socket to use for listening */
private DatagramSocket dsocket;
public EventProcessor( int port ) {
this.port = port;
/* establish a socket for the stats port */
try {
dsocket = new DatagramSocket(port);
} catch( SocketException e ) {
System.err.println( "Error: UDP socket setup failed for Event Processor Thread: " + e.getMessage() );
System.exit( 1 );
return;
}
}
/**
* Listens for new event capture packets and processes them with the
* assumption that they are for the first bottleneck on the first router.
*/
public void run() {
byte[] buf = new byte[MAX_PACKET_LEN];
DatagramPacket packet = new DatagramPacket(buf, buf.length);
/* listen for updates until the end of time */
while (true) {
try {
packet.setLength(buf.length);
dsocket.receive(packet);
/* extract the stats (assume router 0) */
EventProcessor.handleEventCapPacket( 0, buf );
} catch( IOException e ) {
System.err.println( "Error: UDP stats receive failed: " + e.getMessage() );
System.exit( 1 );
}
}
}
/** defines event type codes */
private static enum EventType {
TYPE_TS((byte)0),
TYPE_ARRIVE((byte)1),
TYPE_DEPART((byte)2),
TYPE_DROP((byte)3);
EventType( final byte t ) {
type = t;
}
public final byte type;
}
/** reads byte i to i+3 to form an int */
private static int extractInt( byte[] buf, int i ) {
// convert to signed ints, clearing any bits set due to sign extension
int a = buf[i] & 0x000000FF;
int b = buf[i+1] & 0x000000FF;
int c = buf[i+2] & 0x000000FF;
int d = buf[i+3] & 0x000000FF;
// create the int
return a<<24 | b<<16 | c<<8 | d;
}
/** reads byte i to i+3 to form a long (use for unsigned ints which may use the MSB) */
private static long extractUintAsLong( byte[] buf, int i ) {
long ret = 0;
// convert to signed ints, clearing any bits set due to sign extension
int a = buf[i] & 0x000000FF;
int b = buf[i+1] & 0x000000FF;
int c = buf[i+2] & 0x000000FF;
int d = buf[i+3] & 0x000000FF;
// create the int
ret = ((long)a)<<24 | b<<16 | c<<8 | d;
// clear any sign extended bits
return ret & 0x00000000FFFFFFFFL;
}
private static long extractTimestamp( byte[] buf, int i ) {
long upper = extractUintAsLong(buf, i);
long lower = extractUintAsLong(buf, i+4);
return upper<<32L | lower;
}
private static final void debug_println( String s ) {
//System.err.println( s );
}
/**
* Processes a buffer containing an event capture packet.
* @param routerIndex index of the router the data belongs to
* @param buf datagram containing an event capture payload
*/
public static void handleEventCapPacket( int routerIndex, byte[] buf ) {
// always assume first bottleneck for now
BottleneckLink b = DemoGUI.me.demo.getRouters().get(routerIndex).getBottleneckLinkAt(0);
// start processing at byte 1 (byte 0 isn't too interesting)
int index = 1;
int num_events = buf[index] & 0xFF; /* cast to an int so we properly interpret values > 127 */
index += 1;
// skip the sequence number
debug_println( "seq = " + extractInt(buf, index) );
index += 4;
// get the timestamp before the queue data
long timestamp_8ns = extractTimestamp( buf, 70 );
if( !b.prepareForUpdate( timestamp_8ns ) ) {
debug_println( "old timestamp (ignoring) " + timestamp_8ns );
return; // old, out-of-order packet
}
else
debug_println( "got new timestamp " + timestamp_8ns );
// get queue occupancy data
for( int i=0; i<NUM_QUEUES; i++ ) {
// update the queue with its new absolute value
- if( i == 2 ) // only handle NF2C1 for now
- b.setOccupancy( timestamp_8ns, 8 * extractInt(buf, index) );
+ if( i == 2 ) { // only handle NF2C1 for now
+ int num_bytes = 8 * extractInt(buf, index);
+ b.setOccupancy( timestamp_8ns, num_bytes );
+ debug_println( "queue 2 set to " + num_bytes + "B" );
+ }
index += 4;
//skip size in packets
index += 4;
}
// already got the timestamp; keep going
index += 8;
// process each event
long timestamp_adjusted_8ns = timestamp_8ns;
for( int i=0; i<num_events; i++ ) {
int type = (buf[index] & 0xC0) >> 6;
debug_println( " got type = " + Integer.toHexString(type) );
if( type == EventType.TYPE_TS.type ) {
timestamp_8ns = extractTimestamp( buf, index );
index += 8;
debug_println( " got timestamp " + timestamp_8ns );
}
else {
// determine the # of bytes involved and the offset
int val = extractInt( buf, index );
//System.err.println( " got bytes for shorty: " + Integer.toHexString(val) );
int queue_id = (val & 0x38000000) >> 27;
int plen_bytes = ((val & 0x07F80000) >> 19) * 8 - 8; /* - 8 to not include NetFPGA overhead */
timestamp_adjusted_8ns = timestamp_8ns + (val & 0x0007FFFF);
index += 4;
debug_println( " got short event " + type + " (" + plen_bytes + "B) at timestamp " + timestamp_adjusted_8ns + " for queue " + queue_id );
if( queue_id != 2 ) {
// only pay attention to NF2C1 for now
debug_println( " ignoring event for queue " + queue_id );
continue;
}
if( type == EventType.TYPE_ARRIVE.type )
b.arrival( timestamp_adjusted_8ns, plen_bytes );
- if( type == EventType.TYPE_ARRIVE.type )
+ else if( type == EventType.TYPE_DEPART.type )
b.departure( timestamp_adjusted_8ns, plen_bytes );
else
b.dropped( timestamp_adjusted_8ns, plen_bytes );
}
}
// refresh instantaneous readings over the interval from the previous
// update to the time of the last event in this update
b.refreshInstantaneousValues( timestamp_adjusted_8ns );
}
}
| false | true | public static void handleEventCapPacket( int routerIndex, byte[] buf ) {
// always assume first bottleneck for now
BottleneckLink b = DemoGUI.me.demo.getRouters().get(routerIndex).getBottleneckLinkAt(0);
// start processing at byte 1 (byte 0 isn't too interesting)
int index = 1;
int num_events = buf[index] & 0xFF; /* cast to an int so we properly interpret values > 127 */
index += 1;
// skip the sequence number
debug_println( "seq = " + extractInt(buf, index) );
index += 4;
// get the timestamp before the queue data
long timestamp_8ns = extractTimestamp( buf, 70 );
if( !b.prepareForUpdate( timestamp_8ns ) ) {
debug_println( "old timestamp (ignoring) " + timestamp_8ns );
return; // old, out-of-order packet
}
else
debug_println( "got new timestamp " + timestamp_8ns );
// get queue occupancy data
for( int i=0; i<NUM_QUEUES; i++ ) {
// update the queue with its new absolute value
if( i == 2 ) // only handle NF2C1 for now
b.setOccupancy( timestamp_8ns, 8 * extractInt(buf, index) );
index += 4;
//skip size in packets
index += 4;
}
// already got the timestamp; keep going
index += 8;
// process each event
long timestamp_adjusted_8ns = timestamp_8ns;
for( int i=0; i<num_events; i++ ) {
int type = (buf[index] & 0xC0) >> 6;
debug_println( " got type = " + Integer.toHexString(type) );
if( type == EventType.TYPE_TS.type ) {
timestamp_8ns = extractTimestamp( buf, index );
index += 8;
debug_println( " got timestamp " + timestamp_8ns );
}
else {
// determine the # of bytes involved and the offset
int val = extractInt( buf, index );
//System.err.println( " got bytes for shorty: " + Integer.toHexString(val) );
int queue_id = (val & 0x38000000) >> 27;
int plen_bytes = ((val & 0x07F80000) >> 19) * 8 - 8; /* - 8 to not include NetFPGA overhead */
timestamp_adjusted_8ns = timestamp_8ns + (val & 0x0007FFFF);
index += 4;
debug_println( " got short event " + type + " (" + plen_bytes + "B) at timestamp " + timestamp_adjusted_8ns + " for queue " + queue_id );
if( queue_id != 2 ) {
// only pay attention to NF2C1 for now
debug_println( " ignoring event for queue " + queue_id );
continue;
}
if( type == EventType.TYPE_ARRIVE.type )
b.arrival( timestamp_adjusted_8ns, plen_bytes );
if( type == EventType.TYPE_ARRIVE.type )
b.departure( timestamp_adjusted_8ns, plen_bytes );
else
b.dropped( timestamp_adjusted_8ns, plen_bytes );
}
}
// refresh instantaneous readings over the interval from the previous
// update to the time of the last event in this update
b.refreshInstantaneousValues( timestamp_adjusted_8ns );
}
| public static void handleEventCapPacket( int routerIndex, byte[] buf ) {
// always assume first bottleneck for now
BottleneckLink b = DemoGUI.me.demo.getRouters().get(routerIndex).getBottleneckLinkAt(0);
// start processing at byte 1 (byte 0 isn't too interesting)
int index = 1;
int num_events = buf[index] & 0xFF; /* cast to an int so we properly interpret values > 127 */
index += 1;
// skip the sequence number
debug_println( "seq = " + extractInt(buf, index) );
index += 4;
// get the timestamp before the queue data
long timestamp_8ns = extractTimestamp( buf, 70 );
if( !b.prepareForUpdate( timestamp_8ns ) ) {
debug_println( "old timestamp (ignoring) " + timestamp_8ns );
return; // old, out-of-order packet
}
else
debug_println( "got new timestamp " + timestamp_8ns );
// get queue occupancy data
for( int i=0; i<NUM_QUEUES; i++ ) {
// update the queue with its new absolute value
if( i == 2 ) { // only handle NF2C1 for now
int num_bytes = 8 * extractInt(buf, index);
b.setOccupancy( timestamp_8ns, num_bytes );
debug_println( "queue 2 set to " + num_bytes + "B" );
}
index += 4;
//skip size in packets
index += 4;
}
// already got the timestamp; keep going
index += 8;
// process each event
long timestamp_adjusted_8ns = timestamp_8ns;
for( int i=0; i<num_events; i++ ) {
int type = (buf[index] & 0xC0) >> 6;
debug_println( " got type = " + Integer.toHexString(type) );
if( type == EventType.TYPE_TS.type ) {
timestamp_8ns = extractTimestamp( buf, index );
index += 8;
debug_println( " got timestamp " + timestamp_8ns );
}
else {
// determine the # of bytes involved and the offset
int val = extractInt( buf, index );
//System.err.println( " got bytes for shorty: " + Integer.toHexString(val) );
int queue_id = (val & 0x38000000) >> 27;
int plen_bytes = ((val & 0x07F80000) >> 19) * 8 - 8; /* - 8 to not include NetFPGA overhead */
timestamp_adjusted_8ns = timestamp_8ns + (val & 0x0007FFFF);
index += 4;
debug_println( " got short event " + type + " (" + plen_bytes + "B) at timestamp " + timestamp_adjusted_8ns + " for queue " + queue_id );
if( queue_id != 2 ) {
// only pay attention to NF2C1 for now
debug_println( " ignoring event for queue " + queue_id );
continue;
}
if( type == EventType.TYPE_ARRIVE.type )
b.arrival( timestamp_adjusted_8ns, plen_bytes );
else if( type == EventType.TYPE_DEPART.type )
b.departure( timestamp_adjusted_8ns, plen_bytes );
else
b.dropped( timestamp_adjusted_8ns, plen_bytes );
}
}
// refresh instantaneous readings over the interval from the previous
// update to the time of the last event in this update
b.refreshInstantaneousValues( timestamp_adjusted_8ns );
}
|
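extractUintAsLong above masks each byte with 0xFF because Java bytes are signed; without the mask a byte like 0xFE would sign-extend and corrupt the assembled value. A self-contained check of that behavior (the sample buffer is illustrative):

public class UnsignedIntDemo {
    /** Reads bytes i..i+3 as a big-endian unsigned 32-bit value, like the event processor. */
    static long extractUintAsLong(byte[] buf, int i) {
        long a = buf[i]     & 0xFF; // masking undoes sign extension
        long b = buf[i + 1] & 0xFF;
        long c = buf[i + 2] & 0xFF;
        long d = buf[i + 3] & 0xFF;
        return (a << 24 | b << 16 | c << 8 | d) & 0xFFFFFFFFL;
    }

    public static void main(String[] args) {
        byte[] buf = { (byte) 0xFF, (byte) 0xFF, (byte) 0xFF, (byte) 0xFE };
        System.out.println(extractUintAsLong(buf, 0)); // prints 4294967294, not -2
    }
}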
diff --git a/P2aktuell/src/ro/inf/p2/uebung03/Pali.java b/P2aktuell/src/ro/inf/p2/uebung03/Pali.java
index d89c892..1022af1 100644
--- a/P2aktuell/src/ro/inf/p2/uebung03/Pali.java
+++ b/P2aktuell/src/ro/inf/p2/uebung03/Pali.java
@@ -1,23 +1,22 @@
package ro.inf.p2.uebung03;
/**
* Created with IntelliJ IDEA.
* User: felix
* Date: 4/10/13
* Time: 2:14 PM
* Palindrome
*/
public class Pali {
public static String filter(String s) {
- s = s.toLowerCase();
- s = s.replaceAll("[^a-z]+", "");
+ s = s.toLowerCase().replaceAll("[^a-z]+", "");
return s;
}
public static boolean isPalindrome(String s) {
s = filter(s);
return s.equals(new StringBuffer(s).reverse().toString());
}
}
| true | true | public static String filter(String s) {
s = s.toLowerCase();
s = s.replaceAll("[^a-z]+", "");
return s;
}
| public static String filter(String s) {
s = s.toLowerCase().replaceAll("[^a-z]+", "");
return s;
}
|
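With the simplification above, filter() chains the lower-casing and the letter filter into a single expression before the reversal check. A usage sketch, assuming the Pali class above is on the classpath in the same package:

public class PaliDemo {
    public static void main(String[] args) {
        // punctuation, spaces, and case are stripped before the comparison
        System.out.println(Pali.isPalindrome("A man, a plan, a canal: Panama")); // true
        System.out.println(Pali.isPalindrome("Not a palindrome"));               // false
    }
}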
diff --git a/library/src/main/java/org/deri/any23/extractor/rdfa/RDFaExtractor.java b/library/src/main/java/org/deri/any23/extractor/rdfa/RDFaExtractor.java
index 09b1126e..374c1047 100644
--- a/library/src/main/java/org/deri/any23/extractor/rdfa/RDFaExtractor.java
+++ b/library/src/main/java/org/deri/any23/extractor/rdfa/RDFaExtractor.java
@@ -1,83 +1,84 @@
package org.deri.any23.extractor.rdfa;
import java.io.IOException;
import java.io.InputStream;
import java.io.StringReader;
import java.io.StringWriter;
import java.util.Arrays;
import org.deri.any23.extractor.ExtractionException;
import org.deri.any23.extractor.ExtractionResult;
import org.deri.any23.extractor.ExtractorDescription;
import org.deri.any23.extractor.ExtractorFactory;
import org.deri.any23.extractor.SimpleExtractorFactory;
import org.deri.any23.extractor.Extractor.TagSoupDOMExtractor;
import org.deri.any23.extractor.rdf.RDFHandlerAdapter;
import org.openrdf.model.URI;
import org.openrdf.rio.RDFHandlerException;
import org.openrdf.rio.RDFParseException;
import org.openrdf.rio.RDFParser;
import org.openrdf.rio.rdfxml.RDFXMLParser;
import org.w3c.dom.Document;
/**
* Extractor for RDFa in HTML, based on Fabien Gadon's XSLT transform, found
* <a href="http://ns.inria.fr/grddl/rdfa/">here</a>. It works by first
* parsing the HTML with a tagsoup parser, then applying the XSLT to the
* DOM tree, and finally parsing the resulting RDF/XML.
*
* TODO: Add a configuration option for whether to add standard HTML triples
* (which result from rel="stylesheet" and such)
*
* @author Gabriele Renzi
* @author Richard Cyganiak ([email protected])
*/
public class RDFaExtractor implements TagSoupDOMExtractor {
public final static String NAME = "html-rdfa";
private final static String xsltFilename = "rdfa.xslt";
private static XSLTStylesheet xslt = null;
public void run(Document in, URI documentURI, ExtractionResult out)
throws IOException, ExtractionException {
StringWriter buffer = new StringWriter();
getXSLT().applyTo(in, buffer);
// System.out.println(buffer);
try {
RDFParser parser = new RDFXMLParser();
parser.setRDFHandler(new RDFHandlerAdapter(out));
parser.parse(
new StringReader(buffer.getBuffer().toString()),
documentURI.stringValue());
} catch (RDFHandlerException ex) {
throw new RuntimeException("Should not happen, RDFHandlerAdapter does not throw RDFHandlerException", ex);
} catch (RDFParseException ex) {
- throw new ExtractionException(ex);
+// System.err.println(buffer.getBuffer().toString());
+ throw new ExtractionException("Invalid RDF/XML produced by RDFa transform: " + ex.getMessage(), ex);
}
}
private synchronized XSLTStylesheet getXSLT() {
// Lazily initialized static instance, so we don't parse
// the XSLT unless really necessary, and only once
if (xslt == null) {
InputStream in = RDFaExtractor.class.getResourceAsStream(xsltFilename);
if (in == null) {
throw new RuntimeException("Couldn't load '" + xsltFilename +
"', maybe the file is not bundled in the jar?");
}
xslt = new XSLTStylesheet(in);
}
return xslt;
}
public ExtractorDescription getDescription() {
return factory;
}
public final static ExtractorFactory<RDFaExtractor> factory =
SimpleExtractorFactory.create(
NAME,
null,
Arrays.asList("text/html;q=0.3", "application/xhtml+xml;q=0.3"),
null,
RDFaExtractor.class);
}
| true | true | public void run(Document in, URI documentURI, ExtractionResult out)
throws IOException, ExtractionException {
StringWriter buffer = new StringWriter();
getXSLT().applyTo(in, buffer);
// System.out.println(buffer);
try {
RDFParser parser = new RDFXMLParser();
parser.setRDFHandler(new RDFHandlerAdapter(out));
parser.parse(
new StringReader(buffer.getBuffer().toString()),
documentURI.stringValue());
} catch (RDFHandlerException ex) {
throw new RuntimeException("Should not happen, RDFHandlerAdapter does not throw RDFHandlerException", ex);
} catch (RDFParseException ex) {
throw new ExtractionException(ex);
}
}
| public void run(Document in, URI documentURI, ExtractionResult out)
throws IOException, ExtractionException {
StringWriter buffer = new StringWriter();
getXSLT().applyTo(in, buffer);
// System.out.println(buffer);
try {
RDFParser parser = new RDFXMLParser();
parser.setRDFHandler(new RDFHandlerAdapter(out));
parser.parse(
new StringReader(buffer.getBuffer().toString()),
documentURI.stringValue());
} catch (RDFHandlerException ex) {
throw new RuntimeException("Should not happen, RDFHandlerAdapter does not throw RDFHandlerException", ex);
} catch (RDFParseException ex) {
// System.err.println(buffer.getBuffer().toString());
throw new ExtractionException("Invalid RDF/XML produced by RDFa transform: " + ex.getMessage(), ex);
}
}
|
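getXSLT() above keeps a lazily initialized static so the stylesheet is parsed at most once, and only when first needed. The same pattern sketched with the standard JAXP API (javax.xml.transform.Templates stands in for the project's XSLTStylesheet wrapper; the class and resource names are illustrative):

import java.io.InputStream;
import javax.xml.transform.Templates;
import javax.xml.transform.TransformerFactory;
import javax.xml.transform.stream.StreamSource;

public class LazyXslt {
    private static Templates templates; // compiled once, reused afterwards

    /** Lazily compiles an XSLT resource bundled on the classpath. */
    static synchronized Templates get(String resource) throws Exception {
        if (templates == null) {
            InputStream in = LazyXslt.class.getResourceAsStream(resource);
            if (in == null) {
                throw new IllegalStateException("Couldn't load '" + resource
                        + "', maybe the file is not bundled in the jar?");
            }
            templates = TransformerFactory.newInstance().newTemplates(new StreamSource(in));
        }
        return templates;
    }
}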
diff --git a/src/org/opensolaris/opengrok/index/IndexDatabase.java b/src/org/opensolaris/opengrok/index/IndexDatabase.java
index 181017c..95c8255 100644
--- a/src/org/opensolaris/opengrok/index/IndexDatabase.java
+++ b/src/org/opensolaris/opengrok/index/IndexDatabase.java
@@ -1,614 +1,615 @@
/*
* CDDL HEADER START
*
* The contents of this file are subject to the terms of the
* Common Development and Distribution License (the "License").
* You may not use this file except in compliance with the License.
*
* See LICENSE.txt included in this distribution for the specific
* language governing permissions and limitations under the License.
*
* When distributing Covered Code, include this CDDL HEADER in each
* file and include the License file at LICENSE.txt.
* If applicable, add the following below this CDDL HEADER, with the
* fields enclosed by brackets "[]" replaced with your own identifying
* information: Portions Copyright [yyyy] [name of copyright owner]
*
* CDDL HEADER END
*/
/*
* Copyright 2008 Sun Microsystems, Inc. All rights reserved.
* Use is subject to license terms.
*/
package org.opensolaris.opengrok.index;
import java.io.BufferedInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import org.apache.lucene.document.DateTools;
import org.apache.lucene.document.Document;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.Term;
import org.apache.lucene.index.TermEnum;
import org.apache.lucene.search.spell.LuceneDictionary;
import org.apache.lucene.search.spell.SpellChecker;
import org.apache.lucene.store.FSDirectory;
import org.opensolaris.opengrok.analysis.AnalyzerGuru;
import org.opensolaris.opengrok.analysis.FileAnalyzer;
import org.opensolaris.opengrok.analysis.FileAnalyzer.Genre;
import org.opensolaris.opengrok.configuration.Project;
import org.opensolaris.opengrok.configuration.RuntimeEnvironment;
import org.opensolaris.opengrok.web.Util;
/**
* This class is used to create / update the index databases. Currently we use
* one index database per project.
*
* @author Trond Norbye
*/
public class IndexDatabase {
private Project project;
private FSDirectory indexDirectory;
private FSDirectory spellDirectory;
private IndexWriter writer;
private IndexReader reader;
private TermEnum uidIter;
private IgnoredNames ignoredNames;
private AnalyzerGuru analyzerGuru;
private File xrefDir;
private boolean interrupted;
private List<IndexChangedListener> listeners;
private File dirtyFile;
private boolean dirty;
/**
* Create a new instance of the Index Database. Use this constructor if
* you don't use any projects
*
* @throws java.io.IOException if an error occurs while creating directories
*/
public IndexDatabase() throws IOException {
initialize();
}
/**
* Create a new instance of an Index Database for a given project
* @param project the project to create the database for
* @throws java.io.IOException if an error occurs while creating directories
*/
public IndexDatabase(Project project) throws IOException {
this.project = project;
initialize();
}
/**
* Update the index database for all of the projects. Print progress to
* standard out.
* @throws java.lang.Exception if an error occurs
*/
public static void updateAll() throws Exception {
updateAll(null);
}
/**
* Update the index database for all of the projects
* @param listener where to signal the changes to the database
* @throws java.lang.Exception if an error occurs
*/
static void updateAll(IndexChangedListener listener) throws Exception {
RuntimeEnvironment env = RuntimeEnvironment.getInstance();
if (env.hasProjects()) {
for (Project project : env.getProjects()) {
IndexDatabase db = new IndexDatabase(project);
if (listener != null) {
db.addIndexChangedListener(listener);
}
db.update();
}
} else {
IndexDatabase db = new IndexDatabase();
if (listener != null) {
db.addIndexChangedListener(listener);
}
db.update();
}
}
private void initialize() throws IOException {
RuntimeEnvironment env = RuntimeEnvironment.getInstance();
File indexDir = new File(env.getDataRootFile(), "index");
File spellDir = new File(env.getDataRootFile(), "spellIndex");
if (project != null) {
indexDir = new File(indexDir, project.getPath());
spellDir = new File(spellDir, project.getPath());
}
if (!indexDir.exists() || !spellDir.exists()) {
indexDir.mkdirs();
spellDir.mkdirs();
// to avoid race conditions, just recheck.
if (!indexDir.exists()) {
throw new FileNotFoundException("Failed to create root directory [" + indexDir.getAbsolutePath() + "]");
}
if (!spellDir.exists()) {
throw new FileNotFoundException("Failed to create root directory [" + spellDir.getAbsolutePath() + "]");
}
}
indexDirectory = FSDirectory.getDirectory(indexDir);
spellDirectory = FSDirectory.getDirectory(spellDir);
ignoredNames = env.getIgnoredNames();
analyzerGuru = new AnalyzerGuru();
if (RuntimeEnvironment.getInstance().isGenerateHtml()) {
xrefDir = new File(env.getDataRootFile(), "xref");
}
listeners = new ArrayList<IndexChangedListener>();
dirtyFile = new File(indexDir, "dirty");
dirty = dirtyFile.exists();
}
/**
* Update the content of this index database
* @throws java.lang.Exception if an error occurs
*/
public synchronized void update() throws Exception {
interrupted = false;
try {
writer = new IndexWriter(indexDirectory, AnalyzerGuru.getAnalyzer());
+ writer.setMaxFieldLength(RuntimeEnvironment.getInstance().getIndexWordLimit());
String root;
File sourceRoot;
if (project != null) {
root = project.getPath();
sourceRoot = new File(RuntimeEnvironment.getInstance().getSourceRootFile(), project.getPath());
} else {
root = "";
sourceRoot = RuntimeEnvironment.getInstance().getSourceRootFile();
}
String startuid = Util.uid(root, "");
reader = IndexReader.open(indexDirectory); // open existing index
uidIter = reader.terms(new Term("u", startuid)); // init uid iterator
indexDown(sourceRoot, root);
while (uidIter.term() != null && uidIter.term().field().equals("u") && uidIter.term().text().startsWith(startuid)) {
removeFile();
uidIter.next();
}
} finally {
if (reader != null) {
try {
reader.close();
} catch (IOException e) {
}
}
if (writer != null) {
try {
writer.close();
} catch (IOException e) {
}
}
}
if (!interrupted && dirty) {
if (RuntimeEnvironment.getInstance().isOptimizeDatabase()) {
optimize();
}
createSpellingSuggestions();
}
}
/**
* Optimize the index database
*/
public void optimize() {
IndexWriter wrt = null;
try {
if (RuntimeEnvironment.getInstance().isVerbose()) {
System.out.print("Optimizing the index ... ");
}
wrt = new IndexWriter(indexDirectory, null, false);
wrt.optimize();
if (RuntimeEnvironment.getInstance().isVerbose()) {
System.out.println("done");
}
dirtyFile.delete();
dirty = false;
} catch (IOException e) {
System.err.println("ERROR: optimizing index: " + e);
} finally {
if (wrt != null) {
try {
wrt.close();
} catch (IOException e) {
}
}
}
}
/**
* Generate a spelling suggestion for the definitions stored in defs
*/
public void createSpellingSuggestions() {
IndexReader indexReader = null;
SpellChecker checker = null;
try {
if (RuntimeEnvironment.getInstance().isVerbose()) {
System.out.print("Generating spelling suggestion index ... ");
}
indexReader = IndexReader.open(indexDirectory);
checker = new SpellChecker(spellDirectory);
checker.indexDictionary(new LuceneDictionary(indexReader, "defs"));
if (RuntimeEnvironment.getInstance().isVerbose()) {
System.out.println("done");
}
} catch (IOException e) {
System.err.println("ERROR: Generating spelling: " + e);
} finally {
if (indexReader != null) {
try {
indexReader.close();
} catch (IOException e) {
}
}
if (spellDirectory != null) {
spellDirectory.close();
}
}
}
private void setDirty() {
try {
if (!dirty) {
dirtyFile.createNewFile();
dirty = true;
}
} catch (Exception e) {
e.printStackTrace();
}
}
/**
* Remove a stale file (uidIter.term().text()) from the index database
* (and the xref file)
* @throws java.io.IOException if an error occurs
*/
private void removeFile() throws IOException {
String path = Util.uid2url(uidIter.term().text());
for (IndexChangedListener listener : listeners) {
listener.fileRemoved(path);
}
writer.deleteDocuments(uidIter.term());
File xrefFile = new File(xrefDir, path);
xrefFile.delete();
xrefFile.getParentFile().delete();
setDirty();
}
/**
* Add a file to the Lucene index (and generate a xref file)
* @param file The file to add
* @param path The path to the file (from source root)
* @throws java.io.IOException if an error occurs
*/
private void addFile(File file, String path) throws IOException {
InputStream in;
try {
in = new BufferedInputStream(new FileInputStream(file));
} catch (IOException ex) {
System.err.println("Warning: " + ex.getMessage());
return;
}
FileAnalyzer fa = AnalyzerGuru.getAnalyzer(in, path);
for (IndexChangedListener listener : listeners) {
listener.fileAdded(path, fa.getClass().getSimpleName());
}
Document d = analyzerGuru.getDocument(file, in, path, fa);
if (d != null) {
writer.addDocument(d, fa);
Genre g = fa.getFactory().getGenre();
if (xrefDir != null && (g == Genre.PLAIN || g == Genre.XREFABLE)) {
File xrefFile = new File(xrefDir, path);
xrefFile.getParentFile().mkdirs();
fa.writeXref(xrefDir, path);
}
setDirty();
} else {
System.err.println("Warning: did not add " + path);
}
try { in.close(); } catch (Exception e) {}
}
/**
* Check if I should accept this file into the index database
* @param file the file to check
* @return true if the file should be included, false otherwise
*/
private boolean accept(File file) {
if (ignoredNames.ignore(file)) {
return false;
}
if (!file.canRead()) {
System.err.println("Warning: could not read " + file.getAbsolutePath());
return false;
}
try {
if (!file.getAbsolutePath().equals(file.getCanonicalPath())) {
System.err.println("Warning: ignored link " + file.getAbsolutePath() +
" -> " + file.getCanonicalPath());
return false;
}
} catch (IOException exp) {
System.err.println("Warning: Failed to resolve name: " + file.getAbsolutePath());
exp.printStackTrace();
}
return true;
}
/**
* Generate indexes recursively
* @param dir the root directory to generate indexes for
* @param parent the path of that directory, relative to the source root
*/
private void indexDown(File dir, String parent) throws IOException {
if (interrupted) {
return;
}
if (!accept(dir)) {
return;
}
File[] files = dir.listFiles();
if (files == null) {
System.err.println("Failed to get file listing for: " + dir.getAbsolutePath());
return;
}
Arrays.sort(files);
for (File file : files) {
if (accept(file)) {
String path = parent + '/' + file.getName();
if (file.isDirectory()) {
indexDown(file, path);
} else {
if (uidIter != null) {
String uid = Util.uid(path, DateTools.timeToString(file.lastModified(), DateTools.Resolution.MILLISECOND)); // construct uid for doc
while (uidIter.term() != null && uidIter.term().field().equals("u") &&
uidIter.term().text().compareTo(uid) < 0) {
removeFile();
uidIter.next();
}
if (uidIter.term() != null && uidIter.term().field().equals("u") &&
uidIter.term().text().compareTo(uid) == 0) {
uidIter.next(); // keep matching docs
} else {
addFile(file, path);
}
} else {
addFile(file, path);
}
}
}
}
}
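// Illustrative sketch (not part of OpenGrok): the uid handling in indexDown()
// above is a merge of two sorted sequences -- uid terms already in the index
// and uids of the files found on disk. Index uids that sort before the current
// file uid are stale and removed; an equal uid means the file is unchanged;
// anything else is a new or modified file. (Stale uids sorting after the last
// file are swept up by the loop in update().) A hypothetical list-based
// equivalent of that loop:
private static void uidMergeSketch(List<String> indexUids, List<String> fileUids) {
int i = 0;
for (String fileUid : fileUids) {
// stale index entries sort before the current file uid
while (i < indexUids.size() && indexUids.get(i).compareTo(fileUid) < 0) {
i++; // removeFile() would run here
}
if (i < indexUids.size() && indexUids.get(i).equals(fileUid)) {
i++; // unchanged file: keep the existing document
} else {
// new or modified file: addFile() would run here
}
}
}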
/**
* Interrupt the index generation (it will stop as soon as
* possible)
*/
public void interrupt() {
interrupted = true;
}
/**
* Register an object to receive events when modifications are made to the
* index database.
*
* @param listener the object to receive the events
*/
void addIndexChangedListener(IndexChangedListener listener) {
listeners.add(listener);
}
/**
* Remove an object from the list of objects that receive events when
* modifications are made to the index database
*
* @param listener the object to remove
*/
void removeIndexChangedListener(IndexChangedListener listener) {
listeners.remove(listener);
}
/**
* List all files in all of the index databases
* @throws java.lang.Exception if an error occurs
*/
public static void listAllFiles() throws Exception {
listAllFiles(null);
}
/**
* List all files in some of the index databases
* @param subFiles Subdirectories for the various projects to list the files
* for (or null or an empty list to dump all projects)
* @throws java.lang.Exception if an error occurs
*/
public static void listAllFiles(List<String> subFiles) throws Exception {
RuntimeEnvironment env = RuntimeEnvironment.getInstance();
if (!env.hasProjects()) {
IndexDatabase db = new IndexDatabase();
db.listFiles();
} else {
if (subFiles == null || subFiles.isEmpty()) {
for (Project project : env.getProjects()) {
IndexDatabase db = new IndexDatabase(project);
db.listFiles();
}
} else {
for (String path : subFiles) {
Project project = Project.getProject(path);
if (project == null) {
System.err.println("Warning: Could not find a project for \"" + path + "\"");
} else {
IndexDatabase db = new IndexDatabase(project);
db.listFiles();
}
}
}
}
}
/**
* List all of the files in this index database
*
* @throws java.lang.Exception if an error occurs
*/
public void listFiles() throws Exception {
IndexReader ireader = null;
TermEnum iter = null;
try {
ireader = IndexReader.open(indexDirectory); // open existing index
iter = ireader.terms(new Term("u", "")); // init uid iterator
while (iter.term() != null) {
System.out.println(Util.uid2url(iter.term().text()));
iter.next();
}
} finally {
if (iter != null) {
try {
iter.close();
} catch (Exception e) {
}
}
if (ireader != null) {
try {
ireader.close();
} catch (Exception e) {
}
}
}
}
static void listFrequentTokens() throws Exception {
listFrequentTokens(null);
}
static void listFrequentTokens(ArrayList<String> subFiles) throws Exception {
final int limit = 4;
RuntimeEnvironment env = RuntimeEnvironment.getInstance();
if (!env.hasProjects()) {
IndexDatabase db = new IndexDatabase();
db.listTokens(limit);
} else {
if (subFiles == null || subFiles.isEmpty()) {
for (Project project : env.getProjects()) {
IndexDatabase db = new IndexDatabase(project);
db.listTokens(limit);
}
} else {
for (String path : subFiles) {
Project project = Project.getProject(path);
if (project == null) {
System.err.println("Warning: Could not find a project for \"" + path + "\"");
} else {
IndexDatabase db = new IndexDatabase(project);
db.listTokens(limit);
}
}
}
}
}
public void listTokens(int freq) throws Exception {
IndexReader ireader = null;
TermEnum iter = null;
try {
ireader = IndexReader.open(indexDirectory);
iter = ireader.terms(new Term("defs", ""));
while (iter.term() != null) {
if (iter.term().field().startsWith("f")) {
if (iter.docFreq() > 16 && iter.term().text().length() > freq) {
System.out.println(iter.term().text());
}
iter.next();
} else {
break;
}
}
} finally {
if (iter != null) {
try {
iter.close();
} catch (Exception e) {
}
}
if (ireader != null) {
try {
ireader.close();
} catch (Exception e) {
}
}
}
}
/**
* Get an IndexReader for the index database where a given file is located.
* @param path the file to get the database for
* @return the index database where the file should be located, or null if
* it cannot be located.
*/
public static IndexReader getIndexReader(String path) {
IndexReader ret = null;
RuntimeEnvironment env = RuntimeEnvironment.getInstance();
File indexDir = new File(env.getDataRootFile(), "index");
if (env.hasProjects()) {
Project p = Project.getProject(path);
if (p != null) {
indexDir = new File(indexDir, p.getPath());
} else {
return null;
}
}
if (indexDir.exists() && IndexReader.indexExists(indexDir)) {
try {
ret = IndexReader.open(indexDir);
} catch (Exception ex) {
System.err.println("Failed to open index: " + indexDir.getAbsolutePath());
ex.printStackTrace();
}
}
return ret;
}
}
| true | true | public synchronized void update() throws Exception {
interrupted = false;
try {
writer = new IndexWriter(indexDirectory, AnalyzerGuru.getAnalyzer());
String root;
File sourceRoot;
if (project != null) {
root = project.getPath();
sourceRoot = new File(RuntimeEnvironment.getInstance().getSourceRootFile(), project.getPath());
} else {
root = "";
sourceRoot = RuntimeEnvironment.getInstance().getSourceRootFile();
}
String startuid = Util.uid(root, "");
reader = IndexReader.open(indexDirectory); // open existing index
uidIter = reader.terms(new Term("u", startuid)); // init uid iterator
indexDown(sourceRoot, root);
while (uidIter.term() != null && uidIter.term().field().equals("u") && uidIter.term().text().startsWith(startuid)) {
removeFile();
uidIter.next();
}
} finally {
if (reader != null) {
try {
reader.close();
} catch (IOException e) {
}
}
if (writer != null) {
try {
writer.close();
} catch (IOException e) {
}
}
}
if (!interrupted && dirty) {
if (RuntimeEnvironment.getInstance().isOptimizeDatabase()) {
optimize();
}
createSpellingSuggestions();
}
}
| public synchronized void update() throws Exception {
interrupted = false;
try {
writer = new IndexWriter(indexDirectory, AnalyzerGuru.getAnalyzer());
writer.setMaxFieldLength(RuntimeEnvironment.getInstance().getIndexWordLimit());
String root;
File sourceRoot;
if (project != null) {
root = project.getPath();
sourceRoot = new File(RuntimeEnvironment.getInstance().getSourceRootFile(), project.getPath());
} else {
root = "";
sourceRoot = RuntimeEnvironment.getInstance().getSourceRootFile();
}
String startuid = Util.uid(root, "");
reader = IndexReader.open(indexDirectory); // open existing index
uidIter = reader.terms(new Term("u", startuid)); // init uid iterator
indexDown(sourceRoot, root);
while (uidIter.term() != null && uidIter.term().field().equals("u") && uidIter.term().text().startsWith(startuid)) {
removeFile();
uidIter.next();
}
} finally {
if (reader != null) {
try {
reader.close();
} catch (IOException e) {
}
}
if (writer != null) {
try {
writer.close();
} catch (IOException e) {
}
}
}
if (!interrupted && dirty) {
if (RuntimeEnvironment.getInstance().isOptimizeDatabase()) {
optimize();
}
createSpellingSuggestions();
}
}
|
diff --git a/src/java/org/apache/commons/codec/language/bm/PhoneticEngine.java b/src/java/org/apache/commons/codec/language/bm/PhoneticEngine.java
index b3681c2e..89960592 100644
--- a/src/java/org/apache/commons/codec/language/bm/PhoneticEngine.java
+++ b/src/java/org/apache/commons/codec/language/bm/PhoneticEngine.java
@@ -1,402 +1,401 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.commons.codec.language.bm;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.EnumMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Set;
import java.util.TreeSet;
/**
* <p>
* Converts words into potential phonetic representations.
* </p>
* <p>
* This is a two-stage process. First, the word is converted into a phonetic representation that takes into account the likely source
* language. Next, this phonetic representation is converted into a pan-European 'average' representation, allowing comparison between
* different versions of essentially the same word from different languages.
* </p>
* <p>
* This class is intentionally immutable. If you wish to alter the settings for a PhoneticEngine, you must make a new one with the updated
* settings. This makes the class thread-safe.
* </p>
* <p>
* Ported from phoneticengine.php
* </p>
*
* @author Apache Software Foundation
* @since 2.0
*/
public class PhoneticEngine {
static class PhonemeBuilder {
public static PhonemeBuilder empty(Languages.LanguageSet languages) {
return new PhonemeBuilder(Collections.singleton(new Rule.Phoneme("", languages)));
}
private final Set<Rule.Phoneme> phonemes;
private PhonemeBuilder(Set<Rule.Phoneme> phonemes) {
this.phonemes = phonemes;
}
public PhonemeBuilder append(CharSequence str) {
Set<Rule.Phoneme> newPhonemes = new HashSet<Rule.Phoneme>();
for (Rule.Phoneme ph : this.phonemes) {
newPhonemes.add(ph.append(str));
}
return new PhonemeBuilder(newPhonemes);
}
public PhonemeBuilder apply(Rule.PhonemeExpr phonemeExpr) {
Set<Rule.Phoneme> newPhonemes = new HashSet<Rule.Phoneme>();
for (Rule.Phoneme left : this.phonemes) {
for (Rule.Phoneme right : phonemeExpr.getPhonemes()) {
Rule.Phoneme join = left.join(right);
if (!join.getLanguages().isEmpty()) {
newPhonemes.add(join);
}
}
}
return new PhonemeBuilder(newPhonemes);
}
public Set<Rule.Phoneme> getPhonemes() {
return this.phonemes;
}
public String makeString() {
StringBuilder sb = new StringBuilder();
// System.err.println(this.phonemes.getClass());
for (Rule.Phoneme ph : this.phonemes) {
if (sb.length() > 0) {
sb.append("|");
}
sb.append(ph.getPhonemeText());
}
return sb.toString();
}
}
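// Illustrative sketch (not part of commons-codec): apply() above expands
// candidates as a cross product -- every phoneme built so far is joined with
// every alternative the matched rule offers, and joins whose language sets
// become empty are discarded. A hypothetical string-only analogue of that
// expansion step:
private static Set<String> crossProductSketch(Set<String> prefixes, Set<String> alternatives) {
Set<String> expanded = new HashSet<String>();
for (String prefix : prefixes) {
for (String alternative : alternatives) {
expanded.add(prefix + alternative); // the real code also intersects language sets
}
}
return expanded;
}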
private static class RulesApplication {
private final List<Rule> finalRules;
private final CharSequence input;
private PhonemeBuilder phonemeBuilder;
private int i;
private boolean found;
public RulesApplication(List<Rule> finalRules, CharSequence input, PhonemeBuilder phonemeBuilder, int i) {
if (finalRules == null) {
throw new NullPointerException("The finalRules argument must not be null");
}
this.finalRules = finalRules;
this.phonemeBuilder = phonemeBuilder;
this.input = input;
this.i = i;
}
public int getI() {
return this.i;
}
public PhonemeBuilder getPhonemeBuilder() {
return this.phonemeBuilder;
}
public RulesApplication invoke() {
this.found = false;
int patternLength = 0;
RULES: for (Rule rule : this.finalRules) {
String pattern = rule.getPattern();
patternLength = pattern.length();
// log("trying pattern: " + pattern);
if (!rule.patternAndContextMatches(this.input, this.i)) {
// log("no match");
continue RULES;
}
this.phonemeBuilder = this.phonemeBuilder.apply(rule.getPhoneme());
this.found = true;
break RULES;
}
if (!this.found) {
patternLength = 1;
}
this.i += patternLength;
return this;
}
public boolean isFound() {
return this.found;
}
}
private static final Map<NameType, Set<String>> NAME_PREFIXES = new EnumMap<NameType, Set<String>>(NameType.class);
static {
NAME_PREFIXES.put(NameType.ASHKENAZI,
Collections.unmodifiableSet(new HashSet<String>(Arrays.asList("bar", "ben", "da", "de", "van", "von"))));
NAME_PREFIXES.put(NameType.SEPHARDIC, Collections.unmodifiableSet(new HashSet<String>(Arrays.asList("al", "el", "da", "dal", "de",
"del", "dela", "de la", "della", "des", "di", "do", "dos", "du", "van", "von"))));
NAME_PREFIXES.put(NameType.GENERIC, Collections.unmodifiableSet(new HashSet<String>(Arrays.asList("da", "dal", "de", "del", "dela",
"de la", "della", "des", "di", "do", "dos", "du", "van", "von"))));
}
private static String join(Iterable<String> strings, String sep) {
StringBuilder sb = new StringBuilder();
Iterator<String> si = strings.iterator();
if (si.hasNext()) {
sb.append(si.next());
}
while (si.hasNext()) {
sb.append(sep).append(si.next());
}
return sb.toString();
}
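// For example, join(Arrays.asList("abc", "def", "ghi"), " ") returns
// "abc def ghi", and an empty iterable yields the empty string.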
private final Lang lang;
private final NameType nameType;
private final RuleType ruleType;
private final boolean concat;
/**
* Generates a new, fully-configured phonetic engine.
*
* @param nameType
* the type of names it will use
* @param ruleType
* the type of rules it will apply
* @param concat
* if it will concatenate multiple encodings
*/
public PhoneticEngine(NameType nameType, RuleType ruleType, boolean concat) {
if (ruleType == RuleType.RULES) {
throw new IllegalArgumentException("ruleType must not be " + RuleType.RULES);
}
this.nameType = nameType;
this.ruleType = ruleType;
this.concat = concat;
this.lang = Lang.instance(nameType);
}
private PhonemeBuilder applyFinalRules(PhonemeBuilder phonemeBuilder, List<Rule> finalRules) {
if (finalRules == null) {
throw new NullPointerException("finalRules can not be null");
}
if (finalRules.isEmpty()) {
return phonemeBuilder;
}
Set<Rule.Phoneme> phonemes = new TreeSet<Rule.Phoneme>();
for (Rule.Phoneme phoneme : phonemeBuilder.getPhonemes()) {
PhonemeBuilder subBuilder = PhonemeBuilder.empty(phoneme.getLanguages());
CharSequence phonemeText = phoneme.getPhonemeText();
// System.err.println("Expanding: " + phonemeText);
for (int i = 0; i < phonemeText.length();) {
RulesApplication rulesApplication = new RulesApplication(finalRules, phonemeText, subBuilder, i).invoke();
boolean found = rulesApplication.isFound();
subBuilder = rulesApplication.getPhonemeBuilder();
if (!found) {
// System.err.println("Not found. Appending as-is");
subBuilder = subBuilder.append(phonemeText.subSequence(i, i + 1));
}
i = rulesApplication.getI();
// System.err.println(phonemeText + " " + i + ": " + subBuilder.makeString());
}
// System.err.println("Expanded to: " + subBuilder.makeString());
phonemes.addAll(subBuilder.getPhonemes());
}
return new PhonemeBuilder(phonemes);
}
/**
* Encodes a string to its phonetic representation.
*
* @param input
* the String to encode
* @return the encoding of the input
*/
public String encode(String input) {
Languages.LanguageSet languageSet = this.lang.guessLanguages(input);
return encode(input, languageSet);
}
/**
* Encodes an input string into an output phonetic representation, given a set of possible origin languages.
*
* @param input
* String to phoneticise; a String with dashes or spaces separating each word
* @param languageSet the set of possible origin languages for the input
* @return a phonetic representation of the input; a String containing '-'-separated phonetic representations of the input
*/
public String encode(String input, final Languages.LanguageSet languageSet) {
final List<Rule> rules = Rule.getInstance(this.nameType, RuleType.RULES, languageSet);
final List<Rule> finalRules1 = Rule.getInstance(this.nameType, this.ruleType, "common");
final List<Rule> finalRules2 = Rule.getInstance(this.nameType, this.ruleType, languageSet);
// System.err.println("Languages: " + languageSet);
// System.err.println("Rules: " + rules);
// tidy the input
// lower case is a locale-dependent operation
input = input.toLowerCase(Locale.ENGLISH).replace('-', ' ').trim();
if (this.nameType == NameType.GENERIC) {
+ if (input.length() >= 2 && input.substring(0, 2).equals("d'")) { // check for d'
+ String remainder = input.substring(2);
+ String combined = "d" + remainder;
+ return "(" + encode(remainder) + ")-(" + encode(combined) + ")";
+ }
for (String l : NAME_PREFIXES.get(this.nameType)) {
// handle generic prefixes
if (input.startsWith(l + " ")) {
// check for any prefix in the words list
String remainder = input.substring(l.length() + 1); // input without the prefix
String combined = l + remainder; // input with prefix without space
return "(" + encode(remainder) + ")-(" + encode(combined) + ")";
}
- // fixme: this case is invariant on l
- else if (input.length() >= 2 && input.substring(0, 2).equals("d'")) { // check for d'
- String remainder = input.substring(2);
- String combined = "d" + remainder;
- return "(" + encode(remainder) + ")-(" + encode(combined) + ")";
- }
}
}
final List<String> words = Arrays.asList(input.split("\\s+"));
final List<String> words2 = new ArrayList<String>();
switch (this.nameType) {
case SEPHARDIC:
for (String aWord : words) {
String[] parts = aWord.split("'");
String lastPart = parts[parts.length - 1];
words2.add(lastPart);
}
words2.removeAll(NAME_PREFIXES.get(this.nameType));
break;
case ASHKENAZI:
words2.addAll(words);
words2.removeAll(NAME_PREFIXES.get(this.nameType));
break;
case GENERIC:
words2.addAll(words);
break;
default:
throw new IllegalStateException("Unreachable case: " + this.nameType);
}
if (this.concat) {
// concat mode enabled
input = join(words2, " ");
} else if (words2.size() == 1) {
// not a multi-word name
input = words.iterator().next();
} else {
// encode each word in a multi-word name separately (normally used for approx matches)
StringBuilder result = new StringBuilder();
for (String word : words2) {
result.append("-").append(encode(word));
}
// return the result without the leading "-"
return result.substring(1);
}
PhonemeBuilder phonemeBuilder = PhonemeBuilder.empty(languageSet);
// loop over each char in the input - we will handle the increment manually
for (int i = 0; i < input.length();) {
RulesApplication rulesApplication = new RulesApplication(rules, input, phonemeBuilder, i).invoke();
i = rulesApplication.getI();
phonemeBuilder = rulesApplication.getPhonemeBuilder();
// System.err.println(input + " " + i + ": " + phonemeBuilder.makeString());
}
// System.err.println("Applying general rules");
phonemeBuilder = applyFinalRules(phonemeBuilder, finalRules1);
// System.err.println("Now got: " + phonemeBuilder.makeString());
// System.err.println("Applying language-specific rules");
phonemeBuilder = applyFinalRules(phonemeBuilder, finalRules2);
// System.err.println("Now got: " + phonemeBuilder.makeString());
// System.err.println("Done");
return phonemeBuilder.makeString();
}
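// Illustrative only (not part of commons-codec): a minimal usage sketch of the
// API defined above. The input name and the exact output are assumptions for
// demonstration, not values taken from the library's test suite.
private static void encodeDemo() {
PhoneticEngine engine = new PhoneticEngine(NameType.GENERIC, RuleType.APPROX, true);
// a "d'" name takes the prefix branch above and yields a
// "(remainder)-(combined)" pair of encodings
System.out.println(engine.encode("d'angelo"));
}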
/**
* Gets the Lang language guessing rules being used.
*
* @return the Lang in use
*/
public Lang getLang() {
return this.lang;
}
/**
* Gets the NameType being used.
*
* @return the NameType in use
*/
public NameType getNameType() {
return this.nameType;
}
/**
* Gets the RuleType being used.
*
* @return the RuleType in use
*/
public RuleType getRuleType() {
return this.ruleType;
}
/**
* Gets whether multiple phonetic encodings are concatenated or only the first one is kept.
*
* @return true if multiple phonetic encodings are returned, false if just the first is.
*/
public boolean isConcat() {
return this.concat;
}
}
| false | true | public String encode(String input, final Languages.LanguageSet languageSet) {
final List<Rule> rules = Rule.getInstance(this.nameType, RuleType.RULES, languageSet);
final List<Rule> finalRules1 = Rule.getInstance(this.nameType, this.ruleType, "common");
final List<Rule> finalRules2 = Rule.getInstance(this.nameType, this.ruleType, languageSet);
// System.err.println("Languages: " + languageSet);
// System.err.println("Rules: " + rules);
// tidy the input
// lower case is a locale-dependent operation
input = input.toLowerCase(Locale.ENGLISH).replace('-', ' ').trim();
if (this.nameType == NameType.GENERIC) {
for (String l : NAME_PREFIXES.get(this.nameType)) {
// handle generic prefixes
if (input.startsWith(l + " ")) {
// check for any prefix in the words list
String remainder = input.substring(l.length() + 1); // input without the prefix
String combined = l + remainder; // input with prefix without space
return "(" + encode(remainder) + ")-(" + encode(combined) + ")";
}
// fixme: this case is invariant on l
else if (input.length() >= 2 && input.substring(0, 2).equals("d'")) { // check for d'
String remainder = input.substring(2);
String combined = "d" + remainder;
return "(" + encode(remainder) + ")-(" + encode(combined) + ")";
}
}
}
final List<String> words = Arrays.asList(input.split("\\s+"));
final List<String> words2 = new ArrayList<String>();
switch (this.nameType) {
case SEPHARDIC:
for (String aWord : words) {
String[] parts = aWord.split("'");
String lastPart = parts[parts.length - 1];
words2.add(lastPart);
}
words2.removeAll(NAME_PREFIXES.get(this.nameType));
break;
case ASHKENAZI:
words2.addAll(words);
words2.removeAll(NAME_PREFIXES.get(this.nameType));
break;
case GENERIC:
words2.addAll(words);
break;
default:
throw new IllegalStateException("Unreachable case: " + this.nameType);
}
if (this.concat) {
// concat mode enabled
input = join(words2, " ");
} else if (words2.size() == 1) {
// not a multi-word name
input = words.iterator().next();
} else {
// encode each word in a multi-word name separately (normally used for approx matches)
StringBuilder result = new StringBuilder();
for (String word : words2) {
result.append("-").append(encode(word));
}
// return the result without the leading "-"
return result.substring(1);
}
PhonemeBuilder phonemeBuilder = PhonemeBuilder.empty(languageSet);
// loop over each char in the input - we will handle the increment manually
for (int i = 0; i < input.length();) {
RulesApplication rulesApplication = new RulesApplication(rules, input, phonemeBuilder, i).invoke();
i = rulesApplication.getI();
phonemeBuilder = rulesApplication.getPhonemeBuilder();
// System.err.println(input + " " + i + ": " + phonemeBuilder.makeString());
}
// System.err.println("Applying general rules");
phonemeBuilder = applyFinalRules(phonemeBuilder, finalRules1);
// System.err.println("Now got: " + phonemeBuilder.makeString());
// System.err.println("Applying language-specific rules");
phonemeBuilder = applyFinalRules(phonemeBuilder, finalRules2);
// System.err.println("Now got: " + phonemeBuilder.makeString());
// System.err.println("Done");
return phonemeBuilder.makeString();
}
| public String encode(String input, final Languages.LanguageSet languageSet) {
final List<Rule> rules = Rule.getInstance(this.nameType, RuleType.RULES, languageSet);
final List<Rule> finalRules1 = Rule.getInstance(this.nameType, this.ruleType, "common");
final List<Rule> finalRules2 = Rule.getInstance(this.nameType, this.ruleType, languageSet);
// System.err.println("Languages: " + languageSet);
// System.err.println("Rules: " + rules);
// tidy the input
// lower case is a locale-dependent operation
input = input.toLowerCase(Locale.ENGLISH).replace('-', ' ').trim();
if (this.nameType == NameType.GENERIC) {
if (input.length() >= 2 && input.substring(0, 2).equals("d'")) { // check for d'
String remainder = input.substring(2);
String combined = "d" + remainder;
return "(" + encode(remainder) + ")-(" + encode(combined) + ")";
}
for (String l : NAME_PREFIXES.get(this.nameType)) {
// handle generic prefixes
if (input.startsWith(l + " ")) {
// check for any prefix in the words list
String remainder = input.substring(l.length() + 1); // input without the prefix
String combined = l + remainder; // input with prefix without space
return "(" + encode(remainder) + ")-(" + encode(combined) + ")";
}
}
}
final List<String> words = Arrays.asList(input.split("\\s+"));
final List<String> words2 = new ArrayList<String>();
switch (this.nameType) {
case SEPHARDIC:
for (String aWord : words) {
String[] parts = aWord.split("'");
String lastPart = parts[parts.length - 1];
words2.add(lastPart);
}
words2.removeAll(NAME_PREFIXES.get(this.nameType));
break;
case ASHKENAZI:
words2.addAll(words);
words2.removeAll(NAME_PREFIXES.get(this.nameType));
break;
case GENERIC:
words2.addAll(words);
break;
default:
throw new IllegalStateException("Unreachable case: " + this.nameType);
}
if (this.concat) {
// concat mode enabled
input = join(words2, " ");
} else if (words2.size() == 1) {
// not a multi-word name
input = words.iterator().next();
} else {
// encode each word in a multi-word name separately (normally used for approx matches)
StringBuilder result = new StringBuilder();
for (String word : words2) {
result.append("-").append(encode(word));
}
// return the result without the leading "-"
return result.substring(1);
}
PhonemeBuilder phonemeBuilder = PhonemeBuilder.empty(languageSet);
// loop over each char in the input - we will handle the increment manually
for (int i = 0; i < input.length();) {
RulesApplication rulesApplication = new RulesApplication(rules, input, phonemeBuilder, i).invoke();
i = rulesApplication.getI();
phonemeBuilder = rulesApplication.getPhonemeBuilder();
// System.err.println(input + " " + i + ": " + phonemeBuilder.makeString());
}
// System.err.println("Applying general rules");
phonemeBuilder = applyFinalRules(phonemeBuilder, finalRules1);
// System.err.println("Now got: " + phonemeBuilder.makeString());
// System.err.println("Applying language-specific rules");
phonemeBuilder = applyFinalRules(phonemeBuilder, finalRules2);
// System.err.println("Now got: " + phonemeBuilder.makeString());
// System.err.println("Done");
return phonemeBuilder.makeString();
}
|
diff --git a/cdm/src/main/java/ucar/nc2/geotiff/GeotiffWriter.java b/cdm/src/main/java/ucar/nc2/geotiff/GeotiffWriter.java
index 30db83c33..43c8b8626 100644
--- a/cdm/src/main/java/ucar/nc2/geotiff/GeotiffWriter.java
+++ b/cdm/src/main/java/ucar/nc2/geotiff/GeotiffWriter.java
@@ -1,643 +1,643 @@
// $Id:GeotiffWriter.java 63 2006-07-12 21:50:51Z edavis $
/*
* Copyright 1997-2006 Unidata Program Center/University Corporation for
* Atmospheric Research, P.O. Box 3000, Boulder, CO 80307,
* [email protected].
*
* This library is free software; you can redistribute it and/or modify it
* under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation; either version 2.1 of the License, or (at
* your option) any later version.
*
* This library is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser
* General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this library; if not, write to the Free Software Foundation,
* Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
package ucar.nc2.geotiff;
import ucar.ma2.*;
import ucar.nc2.dataset.*;
import ucar.nc2.dt.GridCoordSystem;
import ucar.nc2.dt.GridDataset;
import ucar.nc2.dt.GridDatatype;
import ucar.unidata.geoloc.*;
import ucar.unidata.geoloc.projection.*;
import java.io.*;
/**
*
* @author caron, yuan
* @version $Revision:63 $ $Date:2006-07-12 21:50:51Z $
*/
public class GeotiffWriter {
private String fileOut;
private GeoTiff geotiff;
private short pageNumber = 1;
/**
* Geotiff writer.
* @param fileOut name of output file.
*/
public GeotiffWriter(String fileOut) {
this.fileOut = fileOut;
geotiff = new GeoTiff(fileOut);
}
/**
* Write Grid data to the geotiff file.
*
* @param dataset
* @param grid
* @param data 2D array in YX order
* @param greyScale if true, write greyScale image, else dataSample.
* @throws IOException
*/
public void writeGrid(GridDataset dataset, GridDatatype grid, Array data, boolean greyScale) throws IOException {
GridCoordSystem gcs = grid.getCoordinateSystem();
if (!gcs.isRegularSpatial())
throw new IllegalArgumentException("Must have 1D x and y axes for "+ grid.getName());
CoordinateAxis1D xaxis = (CoordinateAxis1D) gcs.getXHorizAxis();
CoordinateAxis1D yaxis = (CoordinateAxis1D) gcs.getYHorizAxis();
//latlon coord does not need to be scaled
double scaler = (gcs.isLatLon()) ? 1.0 : 1000.0;
// data must go from top to bottom LOOK IS THIS REALLY NEEDED ?
double xStart = xaxis.getCoordValue(0) * scaler;
double yStart = yaxis.getCoordValue(0) * scaler;
double xInc = xaxis.getIncrement() * scaler;
double yInc = Math.abs(yaxis.getIncrement()) * scaler;
if (yaxis.getCoordValue(0) < yaxis.getCoordValue(1)) {
data = data.flip(0);
yStart = yaxis.getCoordValue((int)yaxis.getSize()-1) * scaler;
}
if (gcs.isLatLon()) {
Array lon = xaxis.read();
data = geoShiftDataAtLon(data, lon);
xStart = geoShiftGetXstart(lon, xInc);
//xStart = -180.0;
}
if (!xaxis.isRegular() || !yaxis.isRegular())
throw new IllegalArgumentException("Must be evenly spaced grid = "+ grid.getName());
if (pageNumber > 1)
geotiff.initTags();
// write it out
writeGrid(grid, data, greyScale, xStart, yStart, xInc, yInc, pageNumber);
pageNumber++;
}
public void writeGrid(String fileName, String gridName, int time, int level, boolean greyScale, LatLonRect pt) throws IOException {
double scaler;
GridDataset dataset = ucar.nc2.dt.grid.GridDataset.open(fileName);
GridDatatype grid = dataset.findGridDatatype( gridName);
if (grid == null)
throw new IllegalArgumentException("No grid named "+ gridName+" in "+ fileName);
GridCoordSystem gcs = grid.getCoordinateSystem();
if (!gcs.isRegularSpatial())
throw new IllegalArgumentException("Must have 1D x and y axes for "+ grid.getName());
CoordinateAxis1D xaxis = (CoordinateAxis1D) gcs.getXHorizAxis();
CoordinateAxis1D yaxis = (CoordinateAxis1D) gcs.getYHorizAxis();
if (!xaxis.isRegular() || !yaxis.isRegular())
throw new IllegalArgumentException("Must be evenly spaced grid = "+ grid.getName());
// read in data
Array data = grid.readDataSlice(time, level, -1, -1);
Array lon = xaxis.read();
Array lat = yaxis.read();
//latlon coord does not need to be scaled by 1000.0
if (gcs.isLatLon()) scaler = 1.0;
else scaler = 1000.0;
if (yaxis.getCoordValue(0) < yaxis.getCoordValue(1)) {
data = data.flip(0);
lat = lat.flip(0);
}
if (gcs.isLatLon()) {
data = geoShiftDataAtLon(data, lon);
lon = geoShiftLon(lon);
}
// now it is time to subset the data out of latlonrect
// it is assumed that latlonrect pt is in +-180
LatLonPointImpl llp0 = pt.getLowerLeftPoint();
LatLonPointImpl llpn = pt.getUpperRightPoint();
double minLon = llp0.getLongitude();
double minLat = llp0.getLatitude();
double maxLon = llpn.getLongitude();
double maxLat = llpn.getLatitude();
// (x1, y1) is upper left point and (x2, y2) is lower right point
int x1 = getLonIndex(lon, minLon, 0);
int y1 = getLatIndex(lat, maxLat, 0);
int x2 = getLonIndex(lon, maxLon, 1);
int y2 = getLatIndex(lat, minLat, 1);
// data must go from top to bottom LOOK IS THIS REALLY NEEDED ?
double xStart = minLon;
double yStart = maxLat;
double xInc = xaxis.getIncrement() * scaler;
double yInc = Math.abs(yaxis.getIncrement()) * scaler;
// subsetting the data inside the box
Array data1 = getYXDataInBox(data, x1, x2, y1, y2);
if (pageNumber > 1)
geotiff.initTags();
// write it out
writeGrid(grid, data1, greyScale, xStart, yStart, xInc, yInc, pageNumber);
pageNumber++;
}
int getLatIndex( Array lat, double value, int side)
{
int[] shape = lat.getShape();
IndexIterator latIter = lat.getIndexIterator();
Index ind = lat.getIndex();
int count = 0;
int isInd = 0;
//LatLonPoint p0 = new LatLonPointImpl(lat.getFloat(ind.set(0)), 0);
double xlat = latIter.getFloatNext();
if ( xlat == value ) return 0;
while (latIter.hasNext() && xlat > value) {
count++;
xlat = latIter.getFloatNext();
if (xlat == value) isInd = 1;
}
if( isInd == 1) count += side;
count -= side;
return count;
}
int getLonIndex( Array lon, double value, int side)
{
int[] shape = lon.getShape();
IndexIterator lonIter = lon.getIndexIterator();
Index ind = lon.getIndex();
int count = 0;
int isInd = 0;
// double xlon = lon.getFloat(ind.set(0));
float xlon = lonIter.getFloatNext();
if ( xlon == value ) return 0;
while (lonIter.hasNext() && xlon < value) {
count++;
xlon = lonIter.getFloatNext();
if ( xlon == value ) isInd = 1;
}
if(isInd == 1) count += side;
count -= side;
return count;
}
public Array getYXDataInBox(Array data, int x1, int x2, int y1, int y2) throws java.io.IOException {
int rank = data.getRank();
int [] start = new int[rank];
int [] shape = new int[rank];
for (int i=0; i<rank; i++) {
start[i] = 0;
shape[i] = 1;
}
if ( y1 >= 0 && y2 >=0 ) {
start[ 0] = y1;
shape[ 0] = y2 - y1;
}
if (x1 >= 0 && x2 >=0 ) {
start[ 1] = x1;
shape[ 1] = x2 - x1;
}
// read it
Array dataVolume;
try {
dataVolume = data.section( start, shape);
} catch (Exception e) {
throw new java.io.IOException(e.getMessage());
}
return dataVolume;
}
/**
* Write Grid data to the geotiff file.
* Grid currently must:
* <ol>
* <li> have a 1D X and Y coordinate axes.
* <li> be lat/lon or Lambert Conformal Projection
* <li> be equally spaced
* </ol>
* @param grid original grid
* @param data 2D array in YX order
* @param greyScale if true, write greyScale image, else dataSample.
* @param xStart
* @param yStart
* @param xInc
* @param yInc
* @param imageNumber
* @throws IOException
* @throws IllegalArgumentException if above assumptions not valid
*/
public void writeGrid(GridDatatype grid, Array data, boolean greyScale, double xStart, double yStart, double xInc, double yInc, int imageNumber) throws IOException {
int nextStart = 0;
GridCoordSystem gcs = grid.getCoordinateSystem();
// get rid of this when all projections are implemented
if (!gcs.isLatLon() && !(gcs.getProjection() instanceof LambertConformal)
&& !(gcs.getProjection() instanceof Stereographic))
throw new IllegalArgumentException("Must be lat/lon or LambertConformal grid = "+ gcs.getProjection().getClass().getName());
// write the data first
if (greyScale) {
ArrayByte result = replaceMissingValuesAndScale( grid, data);
nextStart = geotiff.writeData( (byte []) result.getStorage(), imageNumber);
} else {
ArrayFloat result = replaceMissingValues( grid, data);
nextStart = geotiff.writeData( (float []) result.getStorage(), imageNumber);
}
// set the width and the height
int elemSize = greyScale ? 1 : 4;
int height = data.getShape()[0]; // Y
int width = data.getShape()[1]; // X
int size = elemSize * height * width; // size in bytes
geotiff.addTag( new IFDEntry(Tag.ImageWidth, FieldType.SHORT).setValue( width));
geotiff.addTag( new IFDEntry(Tag.ImageLength, FieldType.SHORT).setValue( height));
// set the multiple images tag
int ff = 1 << 1;
int page = imageNumber -1;
geotiff.addTag( new IFDEntry(Tag.NewSubfileType, FieldType.SHORT).setValue(ff));
geotiff.addTag( new IFDEntry(Tag.PageNumber, FieldType.SHORT).setValue( page, 2));
// just make it all one big "row"
geotiff.addTag( new IFDEntry(Tag.RowsPerStrip, FieldType.SHORT).setValue( height));
geotiff.addTag( new IFDEntry(Tag.StripByteCounts, FieldType.LONG).setValue( size));
// data starts here, header is written at the end
if( imageNumber == 1 )
geotiff.addTag( new IFDEntry(Tag.StripOffsets, FieldType.LONG).setValue( 8));
else
geotiff.addTag( new IFDEntry(Tag.StripOffsets, FieldType.LONG).setValue(nextStart));
// standard tags
geotiff.addTag( new IFDEntry(Tag.Orientation, FieldType.SHORT).setValue( 1));
geotiff.addTag( new IFDEntry(Tag.Compression, FieldType.SHORT).setValue( 1)); // no compression
geotiff.addTag( new IFDEntry(Tag.Software, FieldType.ASCII).setValue( "nc2geotiff"));
geotiff.addTag( new IFDEntry(Tag.PhotometricInterpretation, FieldType.SHORT).setValue( 1)); // black is zero : not used?
geotiff.addTag( new IFDEntry(Tag.PlanarConfiguration, FieldType.SHORT).setValue( 1));
if (greyScale) {
// standard tags for Greyscale images ( see TIFF spec, section 4)
geotiff.addTag( new IFDEntry(Tag.BitsPerSample, FieldType.SHORT).setValue( 8)); // 8 bits per sample
geotiff.addTag( new IFDEntry(Tag.SamplesPerPixel, FieldType.SHORT).setValue( 1));
geotiff.addTag( new IFDEntry(Tag.XResolution, FieldType.RATIONAL).setValue(1, 1));
geotiff.addTag( new IFDEntry(Tag.YResolution, FieldType.RATIONAL).setValue(1, 1));
geotiff.addTag( new IFDEntry(Tag.ResolutionUnit, FieldType.SHORT).setValue( 1));
} else {
// standard tags for SampleFormat ( see TIFF spec, section 19)
- geotiff.addTag( new IFDEntry(Tag.BitsPerSample, FieldType.SHORT).setValue( 32)); // 32 bits per sample
+ geotiff.addTag( new IFDEntry(Tag.BitsPerSample, FieldType.SHORT).setValue( 8, 8, 8)); // 8 bits per sample
geotiff.addTag( new IFDEntry(Tag.SampleFormat, FieldType.SHORT).setValue( 3)); // Sample Format
geotiff.addTag( new IFDEntry(Tag.SamplesPerPixel, FieldType.SHORT).setValue( 1));
MAMath.MinMax dataMinMax = grid.getMinMaxSkipMissingData( data);
float min = (float) (dataMinMax.min);
float max = (float) (dataMinMax.max);
geotiff.addTag( new IFDEntry(Tag.SMinSampleValue, FieldType.FLOAT).setValue( min));
geotiff.addTag( new IFDEntry(Tag.SMaxSampleValue, FieldType.FLOAT).setValue( max));
}
/*
geotiff.addTag( new IFDEntry(Tag.Geo_ModelPixelScale, FieldType.DOUBLE).setValue(
new double[] {5.0, 2.5, 0.0} ));
geotiff.addTag( new IFDEntry(Tag.Geo_ModelTiepoint, FieldType.DOUBLE).setValue(
new double[] {0.0, 0.0, 0.0, -180.0, 90.0, 0.0 } ));
// new double[] {0.0, 0.0, 0.0, 183.0, 90.0, 0.0} ));
IFDEntry ifd = new IFDEntry(Tag.Geo_KeyDirectory, FieldType.SHORT).setValue(
new int[] {1, 1, 0, 4, 1024, 0, 1, 2, 1025, 0, 1, 1, 2048, 0, 1, 4326, 2054, 0, 1, 9102} );
geotiff.addTag( ifd);
*/
// set the transformation from projection to pixel, add tie point tag
geotiff.setTransform( xStart, yStart, xInc, yInc);
if (gcs.isLatLon())
addLatLonTags();
else if (gcs.getProjection() instanceof LambertConformal)
addLambertConformalTags((LambertConformal) gcs.getProjection(), xStart, yStart);
else if (gcs.getProjection() instanceof Stereographic)
addPolarStereographicTags((Stereographic) gcs.getProjection(), xStart, yStart);
else
addPolarStereographicTags((Stereographic) gcs.getProjection(), xStart, yStart);
geotiff.writeMetadata(imageNumber);
//geotiff.close();
}
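// Illustrative note: a TIFF file begins with an 8-byte header, which is why
// the first page's StripOffsets above is 8; for later pages the offset is
// wherever the previous writeData() call stopped (the nextStart value).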
public void close()throws IOException
{
geotiff.close();
}
/**
* Replace missing values with dataMinMax.min - 1.0; return a floating point data array.
* @param grid GridDatatype
* @param data input data array
* @return floating point data array with missing values replaced.
*/
private ArrayFloat replaceMissingValues( GridDatatype grid, Array data) {
MAMath.MinMax dataMinMax = grid.getMinMaxSkipMissingData( data);
float minValue = (float) (dataMinMax.min - 1.0);
ArrayFloat floatArray = (ArrayFloat) Array.factory( float.class, data.getShape());
IndexIterator dataIter = data.getIndexIterator();
IndexIterator floatIter = floatArray.getIndexIterator();
while (dataIter.hasNext()) {
float v = dataIter.getFloatNext();
if ( grid.isMissingData( (double) v)) v = minValue;
floatIter.setFloatNext( v);
}
return floatArray;
}
/**
* Replace missing values with 0; scale other values between 1 and 255, return a byte data array.
* @param grid GridDatatype
* @param data input data array
* @return byte data array with missing values replaced and data scaled from 1- 255.
*/
private ArrayByte replaceMissingValuesAndScale( GridDatatype grid, Array data) {
MAMath.MinMax dataMinMax = grid.getMinMaxSkipMissingData( data);
double scale = 254.0/(dataMinMax.max - dataMinMax.min);
ArrayByte byteArray = (ArrayByte) Array.factory( byte.class, data.getShape());
IndexIterator dataIter = data.getIndexIterator();
IndexIterator resultIter = byteArray.getIndexIterator();
byte bv;
while (dataIter.hasNext()) {
double v = dataIter.getDoubleNext();
if ( grid.isMissingData( v))
bv = 0;
else {
int iv = (int) ((v - dataMinMax.min) * scale + 1);
bv = (byte) (iv & 0xff);
}
resultIter.setByteNext( bv);
}
return byteArray;
}
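// Illustrative sketch (not part of the CDM library): the loop above maps the
// valid data range onto bytes 1..255 and reserves 0 for missing values. A
// hypothetical scalar version of the same mapping:
private static byte scaleToByteSketch(double v, double min, double max) {
double scale = 254.0 / (max - min); // e.g. min=10, max=20 gives scale=25.4
int iv = (int) ((v - min) * scale + 1); // v=10 maps to 1, v=20 maps to 255
return (byte) (iv & 0xff);
}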
private void addLatLonTags() {
geotiff.addGeoKey( new GeoKey( GeoKey.Tag.GTModelTypeGeoKey, GeoKey.TagValue.ModelType_Geographic));
geotiff.addGeoKey( new GeoKey( GeoKey.Tag.GTRasterTypeGeoKey, GeoKey.TagValue.RasterType_Area));
geotiff.addGeoKey( new GeoKey( GeoKey.Tag.GeographicTypeGeoKey, GeoKey.TagValue.GeographicType_WGS_84));
geotiff.addGeoKey( new GeoKey( GeoKey.Tag.GeogPrimeMeridianGeoKey, GeoKey.TagValue.GeogPrimeMeridian_GREENWICH));
geotiff.addGeoKey( new GeoKey( GeoKey.Tag.GeogAngularUnitsGeoKey, GeoKey.TagValue.GeogAngularUnits_DEGREE));
}
private void addPolarStereographicTags(Stereographic proj, double FalseEasting, double FalseNorthing) {
geotiff.addGeoKey( new GeoKey( GeoKey.Tag.GTModelTypeGeoKey, GeoKey.TagValue.ModelType_Projected));
geotiff.addGeoKey( new GeoKey( GeoKey.Tag.GTRasterTypeGeoKey, GeoKey.TagValue.RasterType_Area));
// define the "geographic Coordinate System"
geotiff.addGeoKey( new GeoKey( GeoKey.Tag.GeographicTypeGeoKey, GeoKey.TagValue.GeographicType_WGS_84));
//geotiff.addGeoKey( new GeoKey( GeoKey.Tag.GeogPrimeMeridianGeoKey, GeoKey.TagValue.GeogPrimeMeridian_GREENWICH));
//geotiff.addGeoKey( new GeoKey( GeoKey.Tag.GeogAngularUnitsGeoKey, GeoKey.TagValue.GeogAngularUnits_DEGREE));
// define the "coordinate transformation"
geotiff.addGeoKey( new GeoKey( GeoKey.Tag.ProjectedCSTypeGeoKey, GeoKey.TagValue.ProjectedCSType_UserDefined));
geotiff.addGeoKey( new GeoKey( GeoKey.Tag.PCSCitationGeoKey, "Snyder"));
geotiff.addGeoKey( new GeoKey( GeoKey.Tag.ProjectionGeoKey, GeoKey.TagValue.ProjectedCSType_UserDefined));
geotiff.addGeoKey( new GeoKey( GeoKey.Tag.ProjLinearUnitsGeoKey, GeoKey.TagValue.ProjLinearUnits_METER));
//geotiff.addGeoKey( new GeoKey( GeoKey.Tag.ProjLinearUnitsSizeGeoKey, 1.0)); // units of km
// the specifics for Polar Stereographic
geotiff.addGeoKey( new GeoKey( GeoKey.Tag.ProjCoordTransGeoKey, GeoKey.TagValue.ProjCoordTrans_Stereographic));
geotiff.addGeoKey( new GeoKey( GeoKey.Tag.ProjCenterLongGeoKey, 0.0));
geotiff.addGeoKey( new GeoKey( GeoKey.Tag.ProjNatOriginLatGeoKey, 90.0));
//geotiff.addGeoKey( new GeoKey( GeoKey.Tag.ProjNatOriginLongGeoKey, proj.getTangentLon()));
geotiff.addGeoKey( new GeoKey( GeoKey.Tag.ProjScaleAtNatOriginGeoKey, 1.0));
geotiff.addGeoKey( new GeoKey( GeoKey.Tag.ProjFalseEastingGeoKey, 0.0));
geotiff.addGeoKey( new GeoKey( GeoKey.Tag.ProjFalseNorthingGeoKey, 0.0));
}
private void addLambertConformalTags(LambertConformal proj, double FalseEasting, double FalseNorthing) {
geotiff.addGeoKey( new GeoKey( GeoKey.Tag.GTModelTypeGeoKey, GeoKey.TagValue.ModelType_Projected));
geotiff.addGeoKey( new GeoKey( GeoKey.Tag.GTRasterTypeGeoKey, GeoKey.TagValue.RasterType_Area));
// define the "geographic Coordinate System"
geotiff.addGeoKey( new GeoKey( GeoKey.Tag.GeographicTypeGeoKey, GeoKey.TagValue.GeographicType_WGS_84));
//geotiff.addGeoKey( new GeoKey( GeoKey.Tag.GeogPrimeMeridianGeoKey, GeoKey.TagValue.GeogPrimeMeridian_GREENWICH));
//geotiff.addGeoKey( new GeoKey( GeoKey.Tag.GeogAngularUnitsGeoKey, GeoKey.TagValue.GeogAngularUnits_DEGREE));
// define the "coordinate transformation"
geotiff.addGeoKey( new GeoKey( GeoKey.Tag.ProjectedCSTypeGeoKey, GeoKey.TagValue.ProjectedCSType_UserDefined));
geotiff.addGeoKey( new GeoKey( GeoKey.Tag.PCSCitationGeoKey, "Snyder"));
geotiff.addGeoKey( new GeoKey( GeoKey.Tag.ProjectionGeoKey, GeoKey.TagValue.ProjectedCSType_UserDefined));
geotiff.addGeoKey( new GeoKey( GeoKey.Tag.ProjLinearUnitsGeoKey, GeoKey.TagValue.ProjLinearUnits_METER));
//geotiff.addGeoKey( new GeoKey( GeoKey.Tag.ProjLinearUnitsSizeGeoKey, 1.0)); // units of km
// the specifics for lambert conformal
geotiff.addGeoKey( new GeoKey( GeoKey.Tag.ProjCoordTransGeoKey, GeoKey.TagValue.ProjCoordTrans_LambertConfConic_2SP));
geotiff.addGeoKey( new GeoKey( GeoKey.Tag.ProjStdParallel1GeoKey, proj.getParallelOne()));
geotiff.addGeoKey( new GeoKey( GeoKey.Tag.ProjStdParallel2GeoKey, proj.getParallelTwo()));
geotiff.addGeoKey( new GeoKey( GeoKey.Tag.ProjCenterLongGeoKey, proj.getOriginLon()));
geotiff.addGeoKey( new GeoKey( GeoKey.Tag.ProjNatOriginLatGeoKey, proj.getOriginLat()));
geotiff.addGeoKey( new GeoKey( GeoKey.Tag.ProjNatOriginLongGeoKey, proj.getOriginLon()));
geotiff.addGeoKey( new GeoKey( GeoKey.Tag.ProjScaleAtNatOriginGeoKey, 1.0));
geotiff.addGeoKey( new GeoKey( GeoKey.Tag.ProjFalseEastingGeoKey, 0.0));
geotiff.addGeoKey( new GeoKey( GeoKey.Tag.ProjFalseNorthingGeoKey, 0.0));
}
private void dump( Array data, int col) {
int[] shape = data.getShape();
Index ima = data.getIndex();
for(int j = 0; j< shape[0]; j++ ) {
float dd = data.getFloat(ima.set(j, col));
System.out.println(j+" value= "+dd);
}
}
private double geoShiftGetXstart(Array lon, double inc) {
int count = 0;
Index ilon = lon.getIndex();
int[] lonShape = lon.getShape();
IndexIterator lonIter = lon.getIndexIterator();
double xlon = 0.0;
LatLonPoint p0 = new LatLonPointImpl(0, lon.getFloat(ilon.set(0)));
LatLonPoint pN = new LatLonPointImpl(0, lon.getFloat(ilon.set(lonShape[0] -1)));
xlon = p0.getLongitude();
while (lonIter.hasNext()) {
float l = lonIter.getFloatNext();
LatLonPoint pn = new LatLonPointImpl(0, l);
if ( pn.getLongitude() < xlon ) xlon = pn.getLongitude();
}
if ( p0.getLongitude() == pN.getLongitude() ) xlon = xlon - inc;
return xlon;
}
private Array geoShiftDataAtLon(Array data, Array lon) {
int count = 0;
int[] shape = data.getShape();
Index ima = data.getIndex();
Index ilon = lon.getIndex();
int[] lonShape = lon.getShape();
ArrayFloat adata = new ArrayFloat(new int[] {shape[0], shape[1]});
Index imaa = adata.getIndex();
IndexIterator lonIter = lon.getIndexIterator();
LatLonPoint p0 = new LatLonPointImpl(0, lon.getFloat(ilon.set(lonShape[0] -1)));
LatLonPoint pN = new LatLonPointImpl(0, lon.getFloat(ilon.set( 0 )));
while (lonIter.hasNext()) {
float l = lonIter.getFloatNext();
if (l > 180.0 ) count++;
}
//checking if the 0 point and the N point are the same point
int spoint = 0;
if ( p0.getLongitude() == pN.getLongitude() )
{
spoint = shape[1] - count -1 ;
}
else
{
spoint = shape[1] - count;
}
if ( count > 0 && (shape[1] > count) ) {
for(int j = 1; j< shape[1]; j++ ) {
int jj = 0;
if( j >= count ) jj = j - count;
else jj = j + spoint;
for(int i = 0; i < shape[0]; i++) {
float dd = data.getFloat(ima.set(i, jj));
adata.setFloat(imaa.set(i, j), dd );
}
}
if ( p0.getLongitude() == pN.getLongitude() )
{
for(int i = 0; i < shape[0]; i++) {
float dd = adata.getFloat(imaa.set(i, shape[1] -1));
adata.setFloat(imaa.set(i, 0), dd );
}
}
return adata;
} else
return data;
}
private Array geoShiftLon(Array lon) {
int count = 0;
Index lonIndex = lon.getIndex();
int[] lonShape = lon.getShape();
ArrayFloat slon = new ArrayFloat(new int[] {lonShape[0]});
Index slonIndex= slon.getIndex();
IndexIterator lonIter = lon.getIndexIterator();
LatLonPointImpl llp = new LatLonPointImpl();
LatLonPoint p0 = new LatLonPointImpl(0, lon.getFloat(lonIndex.set(lonShape[0] -1)));
LatLonPoint pN = new LatLonPointImpl(0, lon.getFloat(lonIndex.set( 0 )));
while (lonIter.hasNext()) {
float l = lonIter.getFloatNext();
if (l > 180.0 ) count++;
}
//checking if the 0 point and the N point are the same point
int spoint = 0;
if ( p0.getLongitude() == pN.getLongitude() )
{
spoint = lonShape[0] - count -1 ;
}
else
{
spoint = lonShape[0] - count;
}
if ( count > 0 && (lonShape[0] > count) ) {
for(int j = 1; j< lonShape[0]; j++ ) {
int jj = 0;
if( j >= count ) jj = j - count;
else jj = j + spoint;
float dd = lon.getFloat(lonIndex.set(jj));
slon.setFloat(slonIndex.set(j), (float)LatLonPointImpl.lonNormal(dd) );
}
if ( p0.getLongitude() == pN.getLongitude() )
{
float dd = slon.getFloat(slonIndex.set(lonShape[0] -1));
slon.setFloat(slonIndex.set( 0), -(float)LatLonPointImpl.lonNormal(dd));
}
return slon;
} else
return lon;
}
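// Illustrative note: geoShiftLon() above rotates a 0..360 longitude axis so
// the grid starts near -180. LatLonPointImpl.lonNormal() maps longitudes into
// the +/-180 range; for example, 183.0 would normalize to -177.0 (assumed from
// the usual normalization convention, not checked against the library docs).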
/** test */
public static void main(String args[]) throws IOException {
String fileOut = "totalr1.tif";
LatLonPointImpl p1 = new LatLonPointImpl(-15.0, -180.0);
LatLonPointImpl p2 = new LatLonPointImpl(60.0, 180.0);
LatLonRect llr = new LatLonRect(p1, p2);
GeotiffWriter writer = new GeotiffWriter(fileOut);
//writer.writeGrid("radar.nc", "noice_wat", 0, 0, true);
//writer.writeGrid("dods://www.cdc.noaa.gov/cgi-bin/nph-nc/Datasets/coads/2degree/enh/cldc.mean.nc?lat[40:1:50],lon[70:1:110],time[2370:1:2375],cldc[2370:1:2375][40:1:50][70:1:110]", "cldc", 0, 0,true);
//writer.writeGrid("dods://www.cdc.noaa.gov/cgi-bin/nph-nc/Datasets/noaa.oisst.v2/sst.mnmean.nc", "sst", 0, 0,false);
//writer.writeGrid("2003091116_ruc2.nc", "P_sfc", 0, 0, false);
//writer.writeGrid("/home/yuanho/dev/netcdf-java/geotiff/2003072918_avn-x.nc", "P_sfc", 0, 0, true);
writer.writeGrid("/home/yuanho/dev/netcdf-java/geotiff/2003072918_avn-x.nc", "T", 0, 0, true, llr);
writer.close();
// read it back in
GeoTiff geotiff = new GeoTiff(fileOut);
geotiff.read();
System.out.println("geotiff read in = "+geotiff.showInfo());
geotiff.close();
}
}
| true | true | public void writeGrid(GridDatatype grid, Array data, boolean greyScale, double xStart, double yStart, double xInc, double yInc, int imageNumber) throws IOException {
int nextStart = 0;
GridCoordSystem gcs = grid.getCoordinateSystem();
// get rid of this when all projections are implemented
if (!gcs.isLatLon() && !(gcs.getProjection() instanceof LambertConformal)
&& !(gcs.getProjection() instanceof Stereographic))
throw new IllegalArgumentException("Must be lat/lon or LambertConformal grid = "+ gcs.getProjection().getClass().getName());
// write the data first
if (greyScale) {
ArrayByte result = replaceMissingValuesAndScale( grid, data);
nextStart = geotiff.writeData( (byte []) result.getStorage(), imageNumber);
} else {
ArrayFloat result = replaceMissingValues( grid, data);
nextStart = geotiff.writeData( (float []) result.getStorage(), imageNumber);
}
// set the width and the height
int elemSize = greyScale ? 1 : 4;
int height = data.getShape()[0]; // Y
int width = data.getShape()[1]; // X
int size = elemSize * height * width; // size in bytes
geotiff.addTag( new IFDEntry(Tag.ImageWidth, FieldType.SHORT).setValue( width));
geotiff.addTag( new IFDEntry(Tag.ImageLength, FieldType.SHORT).setValue( height));
// set the multiple images tag
int ff = 1 << 1;
int page = imageNumber -1;
geotiff.addTag( new IFDEntry(Tag.NewSubfileType, FieldType.SHORT).setValue(ff));
geotiff.addTag( new IFDEntry(Tag.PageNumber, FieldType.SHORT).setValue( page, 2));
// just make it all one big "row"
geotiff.addTag( new IFDEntry(Tag.RowsPerStrip, FieldType.SHORT).setValue( height));
geotiff.addTag( new IFDEntry(Tag.StripByteCounts, FieldType.LONG).setValue( size));
// data starts here, header is written at the end
if( imageNumber == 1 )
geotiff.addTag( new IFDEntry(Tag.StripOffsets, FieldType.LONG).setValue( 8));
else
geotiff.addTag( new IFDEntry(Tag.StripOffsets, FieldType.LONG).setValue(nextStart));
// standard tags
geotiff.addTag( new IFDEntry(Tag.Orientation, FieldType.SHORT).setValue( 1));
geotiff.addTag( new IFDEntry(Tag.Compression, FieldType.SHORT).setValue( 1)); // no compression
geotiff.addTag( new IFDEntry(Tag.Software, FieldType.ASCII).setValue( "nc2geotiff"));
geotiff.addTag( new IFDEntry(Tag.PhotometricInterpretation, FieldType.SHORT).setValue( 1)); // black is zero : not used?
geotiff.addTag( new IFDEntry(Tag.PlanarConfiguration, FieldType.SHORT).setValue( 1));
if (greyScale) {
// standard tags for Greyscale images ( see TIFF spec, section 4)
geotiff.addTag( new IFDEntry(Tag.BitsPerSample, FieldType.SHORT).setValue( 8)); // 8 bits per sample
geotiff.addTag( new IFDEntry(Tag.SamplesPerPixel, FieldType.SHORT).setValue( 1));
geotiff.addTag( new IFDEntry(Tag.XResolution, FieldType.RATIONAL).setValue(1, 1));
geotiff.addTag( new IFDEntry(Tag.YResolution, FieldType.RATIONAL).setValue(1, 1));
geotiff.addTag( new IFDEntry(Tag.ResolutionUnit, FieldType.SHORT).setValue( 1));
} else {
// standard tags for SampleFormat ( see TIFF spec, section 19)
geotiff.addTag( new IFDEntry(Tag.BitsPerSample, FieldType.SHORT).setValue( 32)); // 32 bits per sample
geotiff.addTag( new IFDEntry(Tag.SampleFormat, FieldType.SHORT).setValue( 3)); // Sample Format
geotiff.addTag( new IFDEntry(Tag.SamplesPerPixel, FieldType.SHORT).setValue( 1));
MAMath.MinMax dataMinMax = grid.getMinMaxSkipMissingData( data);
float min = (float) (dataMinMax.min);
float max = (float) (dataMinMax.max);
geotiff.addTag( new IFDEntry(Tag.SMinSampleValue, FieldType.FLOAT).setValue( min));
geotiff.addTag( new IFDEntry(Tag.SMaxSampleValue, FieldType.FLOAT).setValue( max));
}
/*
geotiff.addTag( new IFDEntry(Tag.Geo_ModelPixelScale, FieldType.DOUBLE).setValue(
new double[] {5.0, 2.5, 0.0} ));
geotiff.addTag( new IFDEntry(Tag.Geo_ModelTiepoint, FieldType.DOUBLE).setValue(
new double[] {0.0, 0.0, 0.0, -180.0, 90.0, 0.0 } ));
// new double[] {0.0, 0.0, 0.0, 183.0, 90.0, 0.0} ));
IFDEntry ifd = new IFDEntry(Tag.Geo_KeyDirectory, FieldType.SHORT).setValue(
new int[] {1, 1, 0, 4, 1024, 0, 1, 2, 1025, 0, 1, 1, 2048, 0, 1, 4326, 2054, 0, 1, 9102} );
geotiff.addTag( ifd);
*/
// set the transformation from projection to pixel, add tie point tag
geotiff.setTransform( xStart, yStart, xInc, yInc);
if (gcs.isLatLon())
addLatLonTags();
else if (gcs.getProjection() instanceof LambertConformal)
addLambertConformalTags((LambertConformal) gcs.getProjection(), xStart, yStart);
else if (gcs.getProjection() instanceof Stereographic)
addPolarStereographicTags((Stereographic) gcs.getProjection(), xStart, yStart);
else
addPolarStereographicTags((Stereographic) gcs.getProjection(), xStart, yStart);
geotiff.writeMetadata(imageNumber);
//geotiff.close();
}
| public void writeGrid(GridDatatype grid, Array data, boolean greyScale, double xStart, double yStart, double xInc, double yInc, int imageNumber) throws IOException {
int nextStart = 0;
GridCoordSystem gcs = grid.getCoordinateSystem();
// get rid of this when all projections are implemented
if (!gcs.isLatLon() && !(gcs.getProjection() instanceof LambertConformal)
&& !(gcs.getProjection() instanceof Stereographic))
throw new IllegalArgumentException("Must be lat/lon or LambertConformal grid = "+ gcs.getProjection().getClass().getName());
// write the data first
if (greyScale) {
ArrayByte result = replaceMissingValuesAndScale( grid, data);
nextStart = geotiff.writeData( (byte []) result.getStorage(), imageNumber);
} else {
ArrayFloat result = replaceMissingValues( grid, data);
nextStart = geotiff.writeData( (float []) result.getStorage(), imageNumber);
}
// set the width and the height
int elemSize = greyScale ? 1 : 4;
int height = data.getShape()[0]; // Y
int width = data.getShape()[1]; // X
int size = elemSize * height * width; // size in bytes
geotiff.addTag( new IFDEntry(Tag.ImageWidth, FieldType.SHORT).setValue( width));
geotiff.addTag( new IFDEntry(Tag.ImageLength, FieldType.SHORT).setValue( height));
// set the multiple images tag
int ff = 1 << 1;
int page = imageNumber -1;
geotiff.addTag( new IFDEntry(Tag.NewSubfileType, FieldType.SHORT).setValue(ff));
geotiff.addTag( new IFDEntry(Tag.PageNumber, FieldType.SHORT).setValue( page, 2));
// just make it all one big "row"
geotiff.addTag( new IFDEntry(Tag.RowsPerStrip, FieldType.SHORT).setValue( height));
geotiff.addTag( new IFDEntry(Tag.StripByteCounts, FieldType.LONG).setValue( size));
// data starts here, header is written at the end
if( imageNumber == 1 )
geotiff.addTag( new IFDEntry(Tag.StripOffsets, FieldType.LONG).setValue( 8));
else
geotiff.addTag( new IFDEntry(Tag.StripOffsets, FieldType.LONG).setValue(nextStart));
// standard tags
geotiff.addTag( new IFDEntry(Tag.Orientation, FieldType.SHORT).setValue( 1));
geotiff.addTag( new IFDEntry(Tag.Compression, FieldType.SHORT).setValue( 1)); // no compression
geotiff.addTag( new IFDEntry(Tag.Software, FieldType.ASCII).setValue( "nc2geotiff"));
geotiff.addTag( new IFDEntry(Tag.PhotometricInterpretation, FieldType.SHORT).setValue( 1)); // black is zero : not used?
geotiff.addTag( new IFDEntry(Tag.PlanarConfiguration, FieldType.SHORT).setValue( 1));
if (greyScale) {
// standard tags for Greyscale images ( see TIFF spec, section 4)
geotiff.addTag( new IFDEntry(Tag.BitsPerSample, FieldType.SHORT).setValue( 8)); // 8 bits per sample
geotiff.addTag( new IFDEntry(Tag.SamplesPerPixel, FieldType.SHORT).setValue( 1));
geotiff.addTag( new IFDEntry(Tag.XResolution, FieldType.RATIONAL).setValue(1, 1));
geotiff.addTag( new IFDEntry(Tag.YResolution, FieldType.RATIONAL).setValue(1, 1));
geotiff.addTag( new IFDEntry(Tag.ResolutionUnit, FieldType.SHORT).setValue( 1));
} else {
// standard tags for SampleFormat ( see TIFF spec, section 19)
geotiff.addTag( new IFDEntry(Tag.BitsPerSample, FieldType.SHORT).setValue( 8, 8, 8)); // 8 bits per sample
geotiff.addTag( new IFDEntry(Tag.SampleFormat, FieldType.SHORT).setValue( 3)); // Sample Format
geotiff.addTag( new IFDEntry(Tag.SamplesPerPixel, FieldType.SHORT).setValue( 1));
MAMath.MinMax dataMinMax = grid.getMinMaxSkipMissingData( data);
float min = (float) (dataMinMax.min);
float max = (float) (dataMinMax.max);
geotiff.addTag( new IFDEntry(Tag.SMinSampleValue, FieldType.FLOAT).setValue( min));
geotiff.addTag( new IFDEntry(Tag.SMaxSampleValue, FieldType.FLOAT).setValue( max));
}
/*
geotiff.addTag( new IFDEntry(Tag.Geo_ModelPixelScale, FieldType.DOUBLE).setValue(
new double[] {5.0, 2.5, 0.0} ));
geotiff.addTag( new IFDEntry(Tag.Geo_ModelTiepoint, FieldType.DOUBLE).setValue(
new double[] {0.0, 0.0, 0.0, -180.0, 90.0, 0.0 } ));
// new double[] {0.0, 0.0, 0.0, 183.0, 90.0, 0.0} ));
IFDEntry ifd = new IFDEntry(Tag.Geo_KeyDirectory, FieldType.SHORT).setValue(
new int[] {1, 1, 0, 4, 1024, 0, 1, 2, 1025, 0, 1, 1, 2048, 0, 1, 4326, 2054, 0, 1, 9102} );
geotiff.addTag( ifd);
*/
// set the transformation from projection to pixel, add tie point tag
geotiff.setTransform( xStart, yStart, xInc, yInc);
if (gcs.isLatLon())
addLatLonTags();
else if (gcs.getProjection() instanceof LambertConformal)
addLambertConformalTags((LambertConformal) gcs.getProjection(), xStart, yStart);
else if (gcs.getProjection() instanceof Stereographic)
addPolarStereographicTags((Stereographic) gcs.getProjection(), xStart, yStart);
else
// the original fallthrough cast every remaining projection to Stereographic,
// which could only throw ClassCastException; fail with a clear message instead
throw new IllegalArgumentException("Unsupported projection: " + gcs.getProjection());
geotiff.writeMetadata(imageNumber);
//geotiff.close();
}
|
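A minimal standalone sketch of the strip-offset bookkeeping above: the first image's data begins right after the 8-byte TIFF header, and each later image's StripOffsets is the running total carried in nextStart. All names below are illustrative, not the ucar.nc2.geotiff API.

public class StripOffsetSketch {
  public static void main(String[] args) {
    long nextStart = 8L;                        // data starts after the 8-byte TIFF header
    int[][] shapes = { {100, 200}, {50, 80} };  // height x width for each image
    int elemSize = 4;                           // 4 bytes per float sample (1 for greyscale)
    for (int image = 1; image <= shapes.length; image++) {
      long size = (long) elemSize * shapes[image - 1][0] * shapes[image - 1][1];
      System.out.println("image " + image + ": StripOffsets=" + nextStart
          + ", StripByteCounts=" + size);
      nextStart += size;                        // where the next image's data will begin
    }
  }
}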
diff --git a/src/test/java/org/got5/tapestry5/jquery/test/CarouselTest.java b/src/test/java/org/got5/tapestry5/jquery/test/CarouselTest.java
index 6a848f05..c48c6de8 100644
--- a/src/test/java/org/got5/tapestry5/jquery/test/CarouselTest.java
+++ b/src/test/java/org/got5/tapestry5/jquery/test/CarouselTest.java
@@ -1,57 +1,57 @@
//
// Copyright 2011 GOT5 (GO Tapestry 5)
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
package org.got5.tapestry5.jquery.test;
import org.apache.tapestry5.test.SeleniumTestCase;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
import com.thoughtworks.selenium.Wait;
public class CarouselTest extends SeleniumTestCase {
@BeforeMethod
public void adjustSpeed() {
// it seems that integration tests are unstable when speed is set to 0
setSpeed("200");
}
@Test
public void testCarousel() {
open("/test/carouselpage");
waitForPageToLoad();
//Content test
assertEquals(getText("css=#zoneContainer span"), "Click on the big white flower of the complex carousel:", "The page does not seem to be fully loaded");
//Carousel control test
- final int initPosition = (Integer) getElementPositionLeft("id=myComplexCarousel");
+ final int initPosition = (Integer) getElementPositionLeft("css=.jcarousel-list");
click("css=.jcarousel-next");
new Wait()
{
@Override
public boolean until()
{
- int newPosition = (Integer) getElementPositionLeft("id=myComplexCarousel");
+ int newPosition = (Integer) getElementPositionLeft("css=.jcarousel-list");
return Math.abs(initPosition - newPosition)==255;
}
}.wait("The carousel did not slide correctly", 3000);
//test of an eventlink inside a carousel
assertTrue("".equals(getText("id=flowerZone")),"The zone is not empty before the ajax call");
click("id=carouselitem_0");
assertEquals(getText("id=flowerZone"),"You just clicked on the big white flower !", "The eventlink did not work, the zone did not load correctly");
}
}
| false | true | public void testCarousel() {
open("/test/carouselpage");
waitForPageToLoad();
//Content test
assertEquals(getText("css=#zoneContainer span"), "Click on the big white flower of the complex carousel:", "The page does not seem to be fully loaded");
//Carousel control test
final int initPosition = (Integer) getElementPositionLeft("id=myComplexCarousel");
click("css=.jcarousel-next");
new Wait()
{
@Override
public boolean until()
{
int newPosition = (Integer) getElementPositionLeft("id=myComplexCarousel");
return Math.abs(initPosition - newPosition)==255;
}
}.wait("The carousel did not slide correctly", 3000);
//test of an eventlink inside a carousel
assertTrue("".equals(getText("id=flowerZone")),"The zone is not empty before the ajax call");
click("id=carouselitem_0");
assertEquals(getText("id=flowerZone"),"You just clicked on the big white flower !", "The eventlink did not work, the zone did not load correctly");
}
| public void testCarousel() {
open("/test/carouselpage");
waitForPageToLoad();
//Content test
assertEquals(getText("css=#zoneContainer span"), "Click on the big white flower of the complex carousel:", "The page does not seem to be fully loaded");
//Carousel control test
final int initPosition = (Integer) getElementPositionLeft("css=.jcarousel-list");
click("css=.jcarousel-next");
new Wait()
{
@Override
public boolean until()
{
int newPosition = (Integer) getElementPositionLeft("css=.jcarousel-list");
return Math.abs(initPosition - newPosition)==255;
}
}.wait("The carousel did not slide correctly", 3000);
//test of an eventlink inside a carousel
assertTrue("".equals(getText("id=flowerZone")),"The zone is not empty before the ajax call");
click("id=carouselitem_0");
assertEquals(getText("id=flowerZone"),"You just clicked on the big white flower !", "The eventlink did not work, the zone did not load correctly");
}
|
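The test above leans on Selenium's Wait, which polls a condition until a timeout expires. A rough standalone sketch of that poll-until pattern (the names here are illustrative, not the Selenium API):

public class PollingWaitSketch {
  interface Condition { boolean met(); }

  // Poll the condition every 50 ms; fail with the given message on timeout.
  static void waitFor(Condition condition, long timeoutMs, String failureMessage) {
    long deadline = System.currentTimeMillis() + timeoutMs;
    while (System.currentTimeMillis() < deadline) {
      if (condition.met()) return;
      try { Thread.sleep(50); }
      catch (InterruptedException e) { Thread.currentThread().interrupt(); break; }
    }
    throw new IllegalStateException(failureMessage);
  }

  public static void main(String[] args) {
    final long start = System.currentTimeMillis();
    waitFor(new Condition() {
      public boolean met() { return System.currentTimeMillis() - start >= 200; }
    }, 3000, "The condition was not met in time");
    System.out.println("condition met");
  }
}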
diff --git a/recipes/src/main/java/ru/taskurotta/recipes/wait/decider/WaitDeciderImpl.java b/recipes/src/main/java/ru/taskurotta/recipes/wait/decider/WaitDeciderImpl.java
index 21090f55..f9149ea0 100644
--- a/recipes/src/main/java/ru/taskurotta/recipes/wait/decider/WaitDeciderImpl.java
+++ b/recipes/src/main/java/ru/taskurotta/recipes/wait/decider/WaitDeciderImpl.java
@@ -1,56 +1,56 @@
package ru.taskurotta.recipes.wait.decider;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import ru.taskurotta.annotation.Asynchronous;
import ru.taskurotta.annotation.Wait;
import ru.taskurotta.core.Promise;
import ru.taskurotta.recipes.wait.worker.WaitWorkerClient;
import ru.taskurotta.test.flow.FlowArbiter;
import java.util.ArrayList;
import java.util.Collection;
/**
* Created by void 13.05.13 19:33
*/
public class WaitDeciderImpl implements WaitDecider {
protected final static Logger log = LoggerFactory.getLogger(WaitDeciderImpl.class);
private FlowArbiter arbiter;
private WaitWorkerClient worker;
private WaitDeciderImpl async;
@Override
public void start() {
arbiter.notify("start");
- Collection<Promise<Integer>> data = new ArrayList<>();
+ Collection<Promise<Integer>> data = new ArrayList<Promise<Integer>>();
//data[0] = worker.prepare();
for (int i = 0; i < 3; i++) {
data.add(worker.generate());
}
async.waitForStart(data);
}
@Asynchronous
public void waitForStart(@Wait Collection<Promise<Integer>> data) {
arbiter.notify("waitFor");
int result = 0;
for (Promise<Integer> promise : data) {
result += promise.get();
}
log.info("result : {}", result);
}
public void setWorker(WaitWorkerClient worker) {
this.worker = worker;
}
public void setAsync(WaitDeciderImpl async) {
this.async = async;
}
public void setArbiter(FlowArbiter arbiter) {
this.arbiter = arbiter;
}
}
| true | true | public void start() {
arbiter.notify("start");
Collection<Promise<Integer>> data = new ArrayList<>();
//data[0] = worker.prepare();
for (int i = 0; i < 3; i++) {
data.add(worker.generate());
}
async.waitForStart(data);
}
| public void start() {
arbiter.notify("start");
Collection<Promise<Integer>> data = new ArrayList<Promise<Integer>>();
//data[0] = worker.prepare();
for (int i = 0; i < 3; i++) {
data.add(worker.generate());
}
async.waitForStart(data);
}
|
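The one-line fix above replaces Java 7's diamond operator with explicit type arguments, which is what a Java 6 compiler requires. A trivial illustration:

import java.util.ArrayList;
import java.util.Collection;

public class DiamondSketch {
  public static void main(String[] args) {
    // Java 7+ infers the type argument: new ArrayList<>()
    // Java 6 needs it spelled out:
    Collection<Integer> data = new ArrayList<Integer>();
    data.add(42);
    System.out.println(data);
  }
}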
diff --git a/framework/pull-agent/src/main/java/org/apache/manifoldcf/crawler/jobs/JobManager.java b/framework/pull-agent/src/main/java/org/apache/manifoldcf/crawler/jobs/JobManager.java
index 5ed20d27f..a30192bdd 100644
--- a/framework/pull-agent/src/main/java/org/apache/manifoldcf/crawler/jobs/JobManager.java
+++ b/framework/pull-agent/src/main/java/org/apache/manifoldcf/crawler/jobs/JobManager.java
@@ -1,8856 +1,8862 @@
/* $Id: JobManager.java 998576 2010-09-19 01:11:02Z kwright $ */
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.manifoldcf.crawler.jobs;
import org.apache.manifoldcf.core.interfaces.*;
import org.apache.manifoldcf.agents.interfaces.*;
import org.apache.manifoldcf.crawler.interfaces.*;
import java.util.*;
import java.util.regex.*;
import org.apache.manifoldcf.crawler.system.Logging;
import org.apache.manifoldcf.crawler.system.ManifoldCF;
/** This is the main job manager. It provides methods that support both job definition and the threads that execute the jobs.
*/
public class JobManager implements IJobManager
{
public static final String _rcsid = "@(#)$Id: JobManager.java 998576 2010-09-19 01:11:02Z kwright $";
protected static final String stufferLock = "_STUFFER_";
protected static final String deleteStufferLock = "_DELETESTUFFER_";
protected static final String expireStufferLock = "_EXPIRESTUFFER_";
protected static final String cleanStufferLock = "_CLEANSTUFFER_";
protected static final String hopLock = "_HOPLOCK_";
// Member variables
protected IDBInterface database;
protected IOutputConnectionManager outputMgr;
protected IRepositoryConnectionManager connectionMgr;
protected ILockManager lockManager;
protected IThreadContext threadContext;
protected JobQueue jobQueue;
protected Jobs jobs;
protected HopCount hopCount;
protected Carrydown carryDown;
protected EventManager eventManager;
protected static Random random = new Random();
/** Constructor.
*@param threadContext is the thread context.
*@param database is the database.
*/
public JobManager(IThreadContext threadContext, IDBInterface database)
throws ManifoldCFException
{
this.database = database;
this.threadContext = threadContext;
jobs = new Jobs(threadContext,database);
jobQueue = new JobQueue(threadContext,database);
hopCount = new HopCount(threadContext,database);
carryDown = new Carrydown(database);
eventManager = new EventManager(database);
outputMgr = OutputConnectionManagerFactory.make(threadContext);
connectionMgr = RepositoryConnectionManagerFactory.make(threadContext);
lockManager = LockManagerFactory.make(threadContext);
}
/** Install.
*/
public void install()
throws ManifoldCFException
{
jobs.install(outputMgr.getTableName(),outputMgr.getConnectionNameColumn(),connectionMgr.getTableName(),connectionMgr.getConnectionNameColumn());
jobQueue.install(jobs.getTableName(),jobs.idField);
hopCount.install(jobs.getTableName(),jobs.idField);
carryDown.install(jobs.getTableName(),jobs.idField);
eventManager.install();
}
/** Uninstall.
*/
public void deinstall()
throws ManifoldCFException
{
eventManager.deinstall();
carryDown.deinstall();
hopCount.deinstall();
jobQueue.deinstall();
jobs.deinstall();
}
/** Export configuration */
public void exportConfiguration(java.io.OutputStream os)
throws java.io.IOException, ManifoldCFException
{
// Write a version indicator
ManifoldCF.writeDword(os,3);
// Get the job list
IJobDescription[] list = getAllJobs();
// Write the number of jobs
ManifoldCF.writeDword(os,list.length);
// Loop through the list and write the individual job info
int i = 0;
while (i < list.length)
{
IJobDescription job = list[i++];
ManifoldCF.writeString(os,job.getConnectionName());
ManifoldCF.writeString(os,job.getOutputConnectionName());
ManifoldCF.writeString(os,job.getDescription());
ManifoldCF.writeDword(os,job.getType());
ManifoldCF.writeDword(os,job.getStartMethod());
ManifoldCF.writeLong(os,job.getInterval());
ManifoldCF.writeLong(os,job.getExpiration());
ManifoldCF.writeLong(os,job.getReseedInterval());
ManifoldCF.writeDword(os,job.getPriority());
ManifoldCF.writeDword(os,job.getHopcountMode());
ManifoldCF.writeString(os,job.getSpecification().toXML());
ManifoldCF.writeString(os,job.getOutputSpecification().toXML());
// Write schedule
int recCount = job.getScheduleRecordCount();
ManifoldCF.writeDword(os,recCount);
int j = 0;
while (j < recCount)
{
ScheduleRecord sr = job.getScheduleRecord(j++);
writeEnumeratedValues(os,sr.getDayOfWeek());
writeEnumeratedValues(os,sr.getMonthOfYear());
writeEnumeratedValues(os,sr.getDayOfMonth());
writeEnumeratedValues(os,sr.getYear());
writeEnumeratedValues(os,sr.getHourOfDay());
writeEnumeratedValues(os,sr.getMinutesOfHour());
ManifoldCF.writeString(os,sr.getTimezone());
ManifoldCF.writeLong(os,sr.getDuration());
ManifoldCF.writeByte(os,sr.getRequestMinimum()?1:0);
}
// Write hop count filters
Map filters = job.getHopCountFilters();
ManifoldCF.writeDword(os,filters.size());
Iterator iter = filters.keySet().iterator();
while (iter.hasNext())
{
String linkType = (String)iter.next();
Long hopcount = (Long)filters.get(linkType);
ManifoldCF.writeString(os,linkType);
ManifoldCF.writeLong(os,hopcount);
}
}
}
protected static void writeEnumeratedValues(java.io.OutputStream os, EnumeratedValues ev)
throws java.io.IOException
{
if (ev == null)
{
ManifoldCF.writeSdword(os,-1);
return;
}
int size = ev.size();
ManifoldCF.writeSdword(os,size);
Iterator iter = ev.getValues();
while (iter.hasNext())
{
ManifoldCF.writeDword(os,((Integer)iter.next()).intValue());
}
}
/** Import configuration */
public void importConfiguration(java.io.InputStream is)
throws java.io.IOException, ManifoldCFException
{
int version = ManifoldCF.readDword(is);
if (version != 2 && version != 3)
throw new java.io.IOException("Unknown job configuration version: "+Integer.toString(version));
int count = ManifoldCF.readDword(is);
int i = 0;
while (i < count)
{
IJobDescription job = createJob();
job.setConnectionName(ManifoldCF.readString(is));
job.setOutputConnectionName(ManifoldCF.readString(is));
job.setDescription(ManifoldCF.readString(is));
job.setType(ManifoldCF.readDword(is));
job.setStartMethod(ManifoldCF.readDword(is));
job.setInterval(ManifoldCF.readLong(is));
job.setExpiration(ManifoldCF.readLong(is));
job.setReseedInterval(ManifoldCF.readLong(is));
job.setPriority(ManifoldCF.readDword(is));
job.setHopcountMode(ManifoldCF.readDword(is));
job.getSpecification().fromXML(ManifoldCF.readString(is));
job.getOutputSpecification().fromXML(ManifoldCF.readString(is));
// Read schedule
int recCount = ManifoldCF.readDword(is);
int j = 0;
while (j < recCount)
{
EnumeratedValues dayOfWeek = readEnumeratedValues(is);
EnumeratedValues monthOfYear = readEnumeratedValues(is);
EnumeratedValues dayOfMonth = readEnumeratedValues(is);
EnumeratedValues year = readEnumeratedValues(is);
EnumeratedValues hourOfDay = readEnumeratedValues(is);
EnumeratedValues minutesOfHour = readEnumeratedValues(is);
String timezone = ManifoldCF.readString(is);
Long duration = ManifoldCF.readLong(is);
boolean requestMinimum;
if (version >= 3)
requestMinimum = (ManifoldCF.readByte(is) != 0);
else
requestMinimum = false;
ScheduleRecord sr = new ScheduleRecord(dayOfWeek, monthOfYear, dayOfMonth, year,
hourOfDay, minutesOfHour, timezone, duration, requestMinimum);
job.addScheduleRecord(sr);
j++;
}
// Read hop count filters
int hopFilterCount = ManifoldCF.readDword(is);
j = 0;
while (j < hopFilterCount)
{
String linkType = ManifoldCF.readString(is);
Long hopcount = ManifoldCF.readLong(is);
job.addHopCountFilter(linkType,hopcount);
j++;
}
// Attempt to save this job
save(job);
i++;
}
}
protected EnumeratedValues readEnumeratedValues(java.io.InputStream is)
throws java.io.IOException
{
int size = ManifoldCF.readSdword(is);
if (size == -1)
return null;
int[] values = new int[size];
int i = 0;
while (i < size)
{
values[i++] = ManifoldCF.readDword(is);
}
return new EnumeratedValues(values);
}
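// Illustrative standalone sketch (not part of JobManager.java): the
// length-prefixed convention used by writeEnumeratedValues/readEnumeratedValues,
// where -1 encodes null and any other count is followed by that many values.
// DataOutputStream/DataInputStream stand in for the ManifoldCF.write*/read*
// helpers, which are assumptions here.
import java.io.*;

public class EnumeratedValuesSketch {
  static void write(DataOutputStream os, int[] values) throws IOException {
    if (values == null) { os.writeInt(-1); return; }
    os.writeInt(values.length);
    for (int v : values) os.writeInt(v);
  }
  static int[] read(DataInputStream is) throws IOException {
    int size = is.readInt();
    if (size == -1) return null;            // -1 round-trips a null set
    int[] values = new int[size];
    for (int i = 0; i < size; i++) values[i] = is.readInt();
    return values;
  }
  public static void main(String[] args) throws IOException {
    ByteArrayOutputStream buf = new ByteArrayOutputStream();
    write(new DataOutputStream(buf), new int[]{1, 2, 3});
    int[] back = read(new DataInputStream(new ByteArrayInputStream(buf.toByteArray())));
    System.out.println(java.util.Arrays.toString(back));
  }
}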
/** Note the deregistration of a connector used by the specified connections.
* This method will be called when the connector is deregistered. Jobs that use these connections
* must therefore enter appropriate states.
*@param connectionNames is the set of connection names.
*/
public void noteConnectorDeregistration(String[] connectionNames)
throws ManifoldCFException
{
// For each connection, find the corresponding list of jobs. From these jobs, we want the job id and the status.
ArrayList list = new ArrayList();
int maxCount = database.findConjunctionClauseMax(new ClauseDescription[]{});
int currentCount = 0;
int i = 0;
while (i < connectionNames.length)
{
if (currentCount == maxCount)
{
noteConnectionDeregistration(list);
list.clear();
currentCount = 0;
}
list.add(connectionNames[i++]);
currentCount++;
}
if (currentCount > 0)
noteConnectionDeregistration(list);
}
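// Illustrative standalone sketch (not part of JobManager.java): the batching
// idiom used above, flushing whenever the batch reaches the database's maximum
// IN-clause size and once more for any remainder.
import java.util.ArrayList;
import java.util.List;

public class BatchFlushSketch {
  static void flush(List<String> batch) { System.out.println("processing batch: " + batch); }
  public static void main(String[] args) {
    String[] connectionNames = {"a", "b", "c", "d", "e"};
    int maxCount = 2;  // stands in for findConjunctionClauseMax(...)
    List<String> batch = new ArrayList<String>();
    for (String name : connectionNames) {
      if (batch.size() == maxCount) { flush(batch); batch.clear(); }
      batch.add(name);
    }
    if (!batch.isEmpty()) flush(batch);
  }
}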
/** Note deregistration for a batch of connection names.
*/
protected void noteConnectionDeregistration(ArrayList list)
throws ManifoldCFException
{
ArrayList newList = new ArrayList();
String query = database.buildConjunctionClause(newList,new ClauseDescription[]{
new MultiClause(jobs.connectionNameField,list)});
// Query for the matching jobs, and then for each job potentially adjust the state
IResultSet set = database.performQuery("SELECT "+jobs.idField+","+jobs.statusField+" FROM "+
jobs.getTableName()+" WHERE "+query+" FOR UPDATE",
newList,null,null);
int i = 0;
while (i < set.getRowCount())
{
IResultRow row = set.getRow(i++);
Long jobID = (Long)row.getValue(jobs.idField);
int statusValue = jobs.stringToStatus((String)row.getValue(jobs.statusField));
jobs.noteConnectorDeregistration(jobID,statusValue);
}
}
/** Note the registration of a connector used by the specified connections.
* This method will be called when a connector is registered, on which the specified
* connections depend.
*@param connectionNames is the set of connection names.
*/
public void noteConnectorRegistration(String[] connectionNames)
throws ManifoldCFException
{
// For each connection, find the corresponding list of jobs. From these jobs, we want the job id and the status.
ArrayList list = new ArrayList();
int maxCount = database.findConjunctionClauseMax(new ClauseDescription[]{});
int currentCount = 0;
int i = 0;
while (i < connectionNames.length)
{
if (currentCount == maxCount)
{
noteConnectionRegistration(list);
list.clear();
currentCount = 0;
}
list.add(connectionNames[i++]);
currentCount++;
}
if (currentCount > 0)
noteConnectionRegistration(list);
}
/** Note registration for a batch of connection names.
*/
protected void noteConnectionRegistration(ArrayList list)
throws ManifoldCFException
{
// Query for the matching jobs, and then for each job potentially adjust the state
ArrayList newList = new ArrayList();
String query = database.buildConjunctionClause(newList,new ClauseDescription[]{
new MultiClause(jobs.connectionNameField,list)});
IResultSet set = database.performQuery("SELECT "+jobs.idField+","+jobs.statusField+" FROM "+
jobs.getTableName()+" WHERE "+query+" FOR UPDATE",
newList,null,null);
int i = 0;
while (i < set.getRowCount())
{
IResultRow row = set.getRow(i++);
Long jobID = (Long)row.getValue(jobs.idField);
int statusValue = jobs.stringToStatus((String)row.getValue(jobs.statusField));
jobs.noteConnectorRegistration(jobID,statusValue);
}
}
/** Note a change in connection configuration.
* This method will be called whenever a connection's configuration is modified, or when an external repository change
* is signalled.
*/
public void noteConnectionChange(String connectionName)
throws ManifoldCFException
{
jobs.noteConnectionChange(connectionName);
}
/** Note the deregistration of an output connector used by the specified connections.
* This method will be called when the connector is deregistered. Jobs that use these connections
* must therefore enter appropriate states.
*@param connectionNames is the set of connection names.
*/
public void noteOutputConnectorDeregistration(String[] connectionNames)
throws ManifoldCFException
{
// For each connection, find the corresponding list of jobs. From these jobs, we want the job id and the status.
ArrayList list = new ArrayList();
int maxCount = database.findConjunctionClauseMax(new ClauseDescription[]{});
int currentCount = 0;
int i = 0;
while (i < connectionNames.length)
{
if (currentCount == maxCount)
{
noteOutputConnectionDeregistration(list);
list.clear();
currentCount = 0;
}
list.add(connectionNames[i++]);
currentCount++;
}
if (currentCount > 0)
noteOutputConnectionDeregistration(list);
}
/** Note deregistration for a batch of output connection names.
*/
protected void noteOutputConnectionDeregistration(ArrayList list)
throws ManifoldCFException
{
ArrayList newList = new ArrayList();
String query = database.buildConjunctionClause(newList,new ClauseDescription[]{
new MultiClause(jobs.outputNameField,list)});
// Query for the matching jobs, and then for each job potentially adjust the state
IResultSet set = database.performQuery("SELECT "+jobs.idField+","+jobs.statusField+" FROM "+
jobs.getTableName()+" WHERE "+query+" FOR UPDATE",
newList,null,null);
int i = 0;
while (i < set.getRowCount())
{
IResultRow row = set.getRow(i++);
Long jobID = (Long)row.getValue(jobs.idField);
int statusValue = jobs.stringToStatus((String)row.getValue(jobs.statusField));
jobs.noteOutputConnectorDeregistration(jobID,statusValue);
}
}
/** Note the registration of an output connector used by the specified connections.
* This method will be called when a connector is registered, on which the specified
* connections depend.
*@param connectionNames is the set of connection names.
*/
public void noteOutputConnectorRegistration(String[] connectionNames)
throws ManifoldCFException
{
// For each connection, find the corresponding list of jobs. From these jobs, we want the job id and the status.
ArrayList list = new ArrayList();
int maxCount = database.findConjunctionClauseMax(new ClauseDescription[]{});
int currentCount = 0;
int i = 0;
while (i < connectionNames.length)
{
if (currentCount == maxCount)
{
noteOutputConnectionRegistration(list);
list.clear();
currentCount = 0;
}
list.add(connectionNames[i++]);
currentCount++;
}
if (currentCount > 0)
noteOutputConnectionRegistration(list);
}
/** Note registration for a batch of output connection names.
*/
protected void noteOutputConnectionRegistration(ArrayList list)
throws ManifoldCFException
{
ArrayList newList = new ArrayList();
String query = database.buildConjunctionClause(newList,new ClauseDescription[]{
new MultiClause(jobs.outputNameField,list)});
// Query for the matching jobs, and then for each job potentially adjust the state
IResultSet set = database.performQuery("SELECT "+jobs.idField+","+jobs.statusField+" FROM "+
jobs.getTableName()+" WHERE "+query+" FOR UPDATE",
newList,null,null);
int i = 0;
while (i < set.getRowCount())
{
IResultRow row = set.getRow(i++);
Long jobID = (Long)row.getValue(jobs.idField);
int statusValue = jobs.stringToStatus((String)row.getValue(jobs.statusField));
jobs.noteOutputConnectorRegistration(jobID,statusValue);
}
}
/** Note a change in output connection configuration.
* This method will be called whenever a connection's configuration is modified, or when an external target config change
* is signalled.
*/
public void noteOutputConnectionChange(String connectionName)
throws ManifoldCFException
{
jobs.noteOutputConnectionChange(connectionName);
}
/** Load a sorted list of job descriptions.
*@return the list, sorted by description.
*/
public IJobDescription[] getAllJobs()
throws ManifoldCFException
{
return jobs.getAll();
}
/** Create a new job.
*@return the new job.
*/
public IJobDescription createJob()
throws ManifoldCFException
{
return jobs.create();
}
/** Get the hoplock for a given job ID */
protected String getHopLockName(Long jobID)
{
return hopLock + jobID;
}
/** Delete a job.
*@param id is the job's identifier. This method will purge all the records belonging to the job from the database, as
* well as remove all documents indexed by the job from the index.
*/
public void deleteJob(Long id)
throws ManifoldCFException
{
database.beginTransaction();
try
{
// If the job is running, throw an error
ArrayList list = new ArrayList();
String query = database.buildConjunctionClause(list,new ClauseDescription[]{
new UnitaryClause(jobs.idField,id)});
IResultSet set = database.performQuery("SELECT "+jobs.statusField+" FROM "+
jobs.getTableName()+" WHERE "+query+" FOR UPDATE",list,null,null);
if (set.getRowCount() == 0)
throw new ManifoldCFException("Attempting to delete a job that doesn't exist: "+id);
IResultRow row = set.getRow(0);
int status = jobs.stringToStatus(row.getValue(jobs.statusField).toString());
if (status == jobs.STATUS_ACTIVE || status == jobs.STATUS_ACTIVESEEDING ||
status == jobs.STATUS_ACTIVE_UNINSTALLED || status == jobs.STATUS_ACTIVESEEDING_UNINSTALLED ||
status == jobs.STATUS_ACTIVE_NOOUTPUT || status == jobs.STATUS_ACTIVESEEDING_NOOUTPUT ||
status == jobs.STATUS_ACTIVE_NEITHER || status == jobs.STATUS_ACTIVESEEDING_NEITHER)
throw new ManifoldCFException("Job "+id+" is active; you must shut it down before deleting it");
if (status != jobs.STATUS_INACTIVE)
throw new ManifoldCFException("Job "+id+" is busy; you must wait and/or shut it down before deleting it");
jobs.writeStatus(id,jobs.STATUS_READYFORDELETE);
if (Logging.jobs.isDebugEnabled())
Logging.jobs.debug("Job "+id+" marked for deletion");
}
catch (ManifoldCFException e)
{
database.signalRollback();
throw e;
}
catch (Error e)
{
database.signalRollback();
throw e;
}
finally
{
database.endTransaction();
}
}
/** Load a job for editing.
*@param id is the job's identifier.
*@return null if the job doesn't exist.
*/
public IJobDescription load(Long id)
throws ManifoldCFException
{
return jobs.load(id,false);
}
/** Load a job.
*@param id is the job's identifier.
*@param readOnly is true if a read-only object is desired.
*@return null if the job doesn't exist.
*/
public IJobDescription load(Long id, boolean readOnly)
throws ManifoldCFException
{
return jobs.load(id,readOnly);
}
/** Save a job.
*@param jobDescription is the job description.
*/
public void save(IJobDescription jobDescription)
throws ManifoldCFException
{
ManifoldCF.noteConfigurationChange();
jobs.save(jobDescription);
}
/** See if there's a reference to a connection name.
*@param connectionName is the name of the connection.
*@return true if there is a reference, false otherwise.
*/
public boolean checkIfReference(String connectionName)
throws ManifoldCFException
{
return jobs.checkIfReference(connectionName);
}
/** See if there's a reference to an output connection name.
*@param connectionName is the name of the connection.
*@return true if there is a reference, false otherwise.
*/
public boolean checkIfOutputReference(String connectionName)
throws ManifoldCFException
{
return jobs.checkIfOutputReference(connectionName);
}
/** Get the job IDs associated with a given connection name.
*@param connectionName is the name of the connection.
*@return the set of job id's associated with that connection.
*/
public IJobDescription[] findJobsForConnection(String connectionName)
throws ManifoldCFException
{
return jobs.findJobsForConnection(connectionName);
}
// These methods cover activities that require interaction with the job queue.
// The job queue is maintained underneath this interface, and all threads that perform
// job activities need to go through this layer.
/** Reset the job queue immediately after starting up.
* If the system was shut down in the middle of a job, sufficient information should
* be around in the database to allow it to restart. However, BEFORE all the job threads
* are spun up, there needs to be a pass over the queue to bring things back to a "normal"
* state.
* Also, if a job's status is in a state that indicates it was being processed by a thread
* (which is now dead), then we have to set that status back to its previous value.
*/
public void prepareForStart()
throws ManifoldCFException
{
Logging.jobs.debug("Resetting due to restart");
while (true)
{
long sleepAmt = 0L;
database.beginTransaction();
try
{
// Clean up events
eventManager.restart();
// Clean up job queue
jobQueue.restart();
// Clean up jobs
jobs.restart();
// Clean up hopcount stuff
hopCount.reset();
// Clean up carrydown stuff
carryDown.reset();
TrackerClass.notePrecommit();
database.performCommit();
TrackerClass.noteCommit();
Logging.jobs.debug("Reset complete");
break;
}
catch (ManifoldCFException e)
{
database.signalRollback();
TrackerClass.noteRollback();
if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT)
{
if (Logging.perf.isDebugEnabled())
Logging.perf.debug("Aborted transaction resetting for restart: "+e.getMessage());
sleepAmt = getRandomAmount();
continue;
}
throw e;
}
catch (Error e)
{
database.signalRollback();
TrackerClass.noteRollback();
throw e;
}
finally
{
database.endTransaction();
sleepFor(sleepAmt);
}
}
}
/** Reset as part of restoring document worker threads.
*/
public void resetDocumentWorkerStatus()
throws ManifoldCFException
{
Logging.jobs.debug("Resetting document active status");
while (true)
{
long sleepAmt = 0L;
database.beginTransaction();
try
{
jobQueue.resetDocumentWorkerStatus();
TrackerClass.notePrecommit();
database.performCommit();
TrackerClass.noteCommit();
break;
}
catch (ManifoldCFException e)
{
database.signalRollback();
TrackerClass.noteRollback();
if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT)
{
if (Logging.perf.isDebugEnabled())
Logging.perf.debug("Aborted transaction resetting document active status: "+e.getMessage());
sleepAmt = getRandomAmount();
continue;
}
throw e;
}
catch (Error e)
{
database.signalRollback();
TrackerClass.noteRollback();
throw e;
}
finally
{
database.endTransaction();
sleepFor(sleepAmt);
}
}
Logging.jobs.debug("Reset complete");
}
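// Illustrative standalone sketch (not part of JobManager.java): the
// retry-on-transaction-abort loop used by the reset methods above. Only abort
// errors are retried, after a short randomized sleep; everything else escapes.
import java.util.Random;

public class RetryLoopSketch {
  static final Random random = new Random();
  static long getRandomAmount() { return 100L + random.nextInt(400); }

  interface Txn { void run() throws Exception; }

  static void runWithRetry(Txn txn) throws Exception {
    while (true) {
      try { txn.run(); return; }                 // begin/commit happen inside in the real code
      catch (IllegalStateException abort) {      // stands in for DATABASE_TRANSACTION_ABORT
        Thread.sleep(getRandomAmount());         // back off, then retry
      }
    }
  }

  public static void main(String[] args) throws Exception {
    final int[] attempts = {0};
    runWithRetry(new Txn() {
      public void run() { if (++attempts[0] < 3) throw new IllegalStateException("abort"); }
    });
    System.out.println("committed after " + attempts[0] + " attempts");
  }
}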
/** Reset as part of restoring seeding threads.
*/
public void resetSeedingWorkerStatus()
throws ManifoldCFException
{
Logging.jobs.debug("Resetting seeding status");
jobs.resetSeedingWorkerStatus();
Logging.jobs.debug("Reset complete");
}
/** Reset as part of restoring doc delete threads.
*/
public void resetDocDeleteWorkerStatus()
throws ManifoldCFException
{
Logging.jobs.debug("Resetting doc deleting status");
TrackerClass.notePrecommit();
jobQueue.resetDocDeleteWorkerStatus();
TrackerClass.noteCommit();
Logging.jobs.debug("Reset complete");
}
/** Reset as part of restoring doc cleanup threads.
*/
public void resetDocCleanupWorkerStatus()
throws ManifoldCFException
{
Logging.jobs.debug("Resetting doc cleaning status");
TrackerClass.notePrecommit();
jobQueue.resetDocCleanupWorkerStatus();
TrackerClass.noteCommit();
Logging.jobs.debug("Reset complete");
}
/** Reset as part of restoring delete startup threads.
*/
public void resetDeleteStartupWorkerStatus()
throws ManifoldCFException
{
Logging.jobs.debug("Resetting job delete starting up status");
jobs.resetDeleteStartupWorkerStatus();
Logging.jobs.debug("Reset complete");
}
/** Reset as part of restoring notification threads.
*/
public void resetNotificationWorkerStatus()
throws ManifoldCFException
{
Logging.jobs.debug("Resetting notification up status");
jobs.resetNotificationWorkerStatus();
Logging.jobs.debug("Reset complete");
}
/** Reset as part of restoring startup threads.
*/
public void resetStartupWorkerStatus()
throws ManifoldCFException
{
Logging.jobs.debug("Resetting job starting up status");
jobs.resetStartupWorkerStatus();
Logging.jobs.debug("Reset complete");
}
// These methods support job delete threads
/** Delete ingested document identifiers (as part of deleting the owning job).
* The number of identifiers specified is guaranteed to be less than the maxInClauseCount
* for the database.
*@param identifiers is the set of document identifiers.
*/
public void deleteIngestedDocumentIdentifiers(DocumentDescription[] identifiers)
throws ManifoldCFException
{
jobQueue.deleteIngestedDocumentIdentifiers(identifiers);
// Hopcount rows get removed when the job itself is removed.
// carrydown records get removed when the job itself is removed.
}
/** Get list of cleanable document descriptions. This list will take into account
* multiple jobs that may own the same document. All documents for which a description
* is returned will be transitioned to the "beingcleaned" state. Documents which are
* not in transition and are eligible, but are owned by other jobs, will have their
* jobqueue entries deleted by this method.
*@param maxCount is the maximum number of documents to return.
*@param currentTime is the current time; some fetches do not occur until a specific time.
*@return the document descriptions for these documents.
*/
public DocumentSetAndFlags getNextCleanableDocuments(int maxCount, long currentTime)
throws ManifoldCFException
{
// The query will be built here, because it joins the jobs table against the jobqueue
// table.
//
// This query must only pick up documents that are not active in any job and
// which belong to a job that's in a "shutting down" state and are in
// a "purgatory" state.
//
// We are in fact more conservative in this query than we need to be; the excluded
// documents will include some false positives that merely match our criteria, which
// are designed to be fast rather than perfect. The match we make is: hashvalue
// against hashvalue, and different job id's.
//
// SELECT id,jobid,docid FROM jobqueue t0 WHERE t0.status='P' AND EXISTS(SELECT 'x' FROM
// jobs t3 WHERE t0.jobid=t3.id AND t3.status='X')
// AND NOT EXISTS(SELECT 'x' FROM jobqueue t2 WHERE t0.hashval=t2.hashval AND t0.jobid!=t2.jobid
// AND t2.status IN ('A','F','B'))
//
// Do a simple preliminary query, since the big query is currently slow, so that we don't waste time during stasis or
// ingestion.
// Moved outside of transaction, so we have no chance of locking up job status cache key for an extended period of time.
if (!jobs.cleaningJobsPresent())
return new DocumentSetAndFlags(new DocumentDescription[0],new boolean[0]);
long startTime = 0L;
if (Logging.perf.isDebugEnabled())
{
startTime = System.currentTimeMillis();
Logging.perf.debug("Waiting to find documents to put on the cleaning queue");
}
while (true)
{
long sleepAmt = 0L;
// Enter a write lock. This means we don't need a FOR UPDATE on the query.
lockManager.enterWriteLock(cleanStufferLock);
try
{
database.beginTransaction();
try
{
if (Logging.perf.isDebugEnabled())
Logging.perf.debug("After "+new Long(System.currentTimeMillis()-startTime).toString()+" ms, beginning query to look for documents to put on cleaning queue");
// Note: This query does not do "FOR UPDATE", because it is running under the only thread that can possibly change the document's state to "being cleaned".
ArrayList list = new ArrayList();
StringBuilder sb = new StringBuilder("SELECT ");
sb.append(jobQueue.idField).append(",")
.append(jobQueue.jobIDField).append(",")
.append(jobQueue.docHashField).append(",")
.append(jobQueue.docIDField).append(",")
.append(jobQueue.failTimeField).append(",")
.append(jobQueue.failCountField)
.append(" FROM ").append(jobQueue.getTableName()).append(" t0 WHERE ")
.append(database.buildConjunctionClause(list,new ClauseDescription[]{
new UnitaryClause("t0."+jobQueue.statusField,jobQueue.statusToString(jobQueue.STATUS_PURGATORY))})).append(" AND ")
.append("(t0.").append(jobQueue.checkTimeField).append(" IS NULL OR t0.").append(jobQueue.checkTimeField).append("<=?) AND ");
list.add(new Long(currentTime));
sb.append("EXISTS(SELECT 'x' FROM ").append(jobs.getTableName()).append(" t1 WHERE ")
.append(database.buildConjunctionClause(list,new ClauseDescription[]{
new UnitaryClause("t1."+jobs.statusField,jobs.statusToString(jobs.STATUS_SHUTTINGDOWN)),
new JoinClause("t1."+jobs.idField,"t0."+jobQueue.jobIDField)}))
.append(") AND ");
sb.append("NOT EXISTS(SELECT 'x' FROM ").append(jobQueue.getTableName()).append(" t2 WHERE ")
.append(database.buildConjunctionClause(list,new ClauseDescription[]{
new JoinClause("t2."+jobQueue.docHashField,"t0."+jobQueue.docHashField)})).append(" AND ")
.append("t2.").append(jobQueue.statusField).append(" IN (?,?,?,?,?,?) AND ")
.append("t2.").append(jobQueue.jobIDField).append("!=t0.").append(jobQueue.jobIDField)
.append(") ");
list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVE));
list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVEPURGATORY));
list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVENEEDRESCAN));
list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVENEEDRESCANPURGATORY));
list.add(jobQueue.statusToString(jobQueue.STATUS_BEINGDELETED));
list.add(jobQueue.statusToString(jobQueue.STATUS_BEINGCLEANED));
sb.append(database.constructOffsetLimitClause(0,maxCount));
// The "checktime is null" check is for backwards compatibility
IResultSet set = database.performQuery(sb.toString(),list,null,null,maxCount,null);
if (Logging.perf.isDebugEnabled())
Logging.perf.debug("Done getting docs to cleaning queue after "+new Long(System.currentTimeMillis()-startTime).toString()+" ms.");
// We need to organize the returned set by connection name and output connection name, so that we can efficiently
// use getUnindexableDocumentIdentifiers.
// This is a table keyed by connection name and containing an ArrayList, which in turn contains DocumentDescription
// objects.
HashMap connectionNameMap = new HashMap();
HashMap documentIDMap = new HashMap();
int i = 0;
while (i < set.getRowCount())
{
IResultRow row = set.getRow(i);
Long jobID = (Long)row.getValue(jobQueue.jobIDField);
String documentIDHash = (String)row.getValue(jobQueue.docHashField);
String documentID = (String)row.getValue(jobQueue.docIDField);
Long failTimeValue = (Long)row.getValue(jobQueue.failTimeField);
Long failCountValue = (Long)row.getValue(jobQueue.failCountField);
// Failtime is probably not useful in this context, but we'll bring it along for completeness
long failTime;
if (failTimeValue == null)
failTime = -1L;
else
failTime = failTimeValue.longValue();
int failCount;
if (failCountValue == null)
failCount = 0;
else
failCount = (int)failCountValue.longValue();
IJobDescription jobDesc = load(jobID);
String connectionName = jobDesc.getConnectionName();
String outputConnectionName = jobDesc.getOutputConnectionName();
DocumentDescription dd = new DocumentDescription((Long)row.getValue(jobQueue.idField),
jobID,documentIDHash,documentID,failTime,failCount);
String compositeDocumentID = makeCompositeID(documentIDHash,connectionName);
documentIDMap.put(compositeDocumentID,dd);
Map y = (Map)connectionNameMap.get(connectionName);
if (y == null)
{
y = new HashMap();
connectionNameMap.put(connectionName,y);
}
ArrayList x = (ArrayList)y.get(outputConnectionName);
if (x == null)
{
// New entry needed
x = new ArrayList();
y.put(outputConnectionName,x);
}
x.add(dd);
i++;
}
// For each bin, obtain a filtered answer, and enter all answers into a hash table.
// We'll then scan the result again to look up the right descriptions for return,
// and delete the ones that are owned by more than one job.
HashMap allowedDocIds = new HashMap();
Iterator iter = connectionNameMap.keySet().iterator();
while (iter.hasNext())
{
String connectionName = (String)iter.next();
Map y = (Map)connectionNameMap.get(connectionName);
Iterator outputIter = y.keySet().iterator();
while (outputIter.hasNext())
{
String outputConnectionName = (String)outputIter.next();
ArrayList x = (ArrayList)y.get(outputConnectionName);
// Do the filter query
DocumentDescription[] descriptions = new DocumentDescription[x.size()];
int j = 0;
while (j < descriptions.length)
{
descriptions[j] = (DocumentDescription)x.get(j);
j++;
}
String[] docIDHashes = getUnindexableDocumentIdentifiers(descriptions,connectionName,outputConnectionName);
j = 0;
while (j < docIDHashes.length)
{
String docIDHash = docIDHashes[j++];
String key = makeCompositeID(docIDHash,connectionName);
allowedDocIds.put(key,docIDHash);
}
}
}
// Now, assemble a result, and change the state of the records accordingly
// First thing to do is order by document hash, so we reduce the risk of deadlock.
String[] compositeIDArray = new String[documentIDMap.size()];
i = 0;
iter = documentIDMap.keySet().iterator();
while (iter.hasNext())
{
compositeIDArray[i++] = (String)iter.next();
}
java.util.Arrays.sort(compositeIDArray);
DocumentDescription[] rval = new DocumentDescription[documentIDMap.size()];
boolean[] rvalBoolean = new boolean[documentIDMap.size()];
i = 0;
while (i < compositeIDArray.length)
{
String compositeDocID = compositeIDArray[i];
DocumentDescription dd = (DocumentDescription)documentIDMap.get(compositeDocID);
// Determine whether we can delete it from the index or not
rvalBoolean[i] = (allowedDocIds.get(compositeDocID) != null);
// Set the record status to "being cleaned" and return it
rval[i++] = dd;
jobQueue.setCleaningStatus(dd.getID());
}
TrackerClass.notePrecommit();
database.performCommit();
TrackerClass.noteCommit();
if (Logging.perf.isDebugEnabled())
Logging.perf.debug("Done pruning unindexable docs after "+new Long(System.currentTimeMillis()-startTime).toString()+" ms.");
return new DocumentSetAndFlags(rval,rvalBoolean);
}
catch (Error e)
{
database.signalRollback();
TrackerClass.noteRollback();
throw e;
}
catch (ManifoldCFException e)
{
database.signalRollback();
TrackerClass.noteRollback();
if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT)
{
if (Logging.perf.isDebugEnabled())
Logging.perf.debug("Aborted transaction finding deleteable docs: "+e.getMessage());
sleepAmt = getRandomAmount();
continue;
}
throw e;
}
finally
{
database.endTransaction();
}
}
finally
{
lockManager.leaveWriteLock(cleanStufferLock);
sleepFor(sleepAmt);
}
}
}
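// Illustrative standalone sketch (not part of JobManager.java): the two-level
// grouping built above, connection name -> output connection name -> documents,
// creating inner maps and lists lazily on first use.
import java.util.*;

public class GroupingSketch {
  public static void main(String[] args) {
    String[][] docs = { {"connA", "out1", "d1"}, {"connA", "out2", "d2"}, {"connB", "out1", "d3"} };
    Map<String, Map<String, List<String>>> byConnection =
      new HashMap<String, Map<String, List<String>>>();
    for (String[] doc : docs) {
      Map<String, List<String>> byOutput = byConnection.get(doc[0]);
      if (byOutput == null) {
        byOutput = new HashMap<String, List<String>>();
        byConnection.put(doc[0], byOutput);
      }
      List<String> bucket = byOutput.get(doc[1]);
      if (bucket == null) {
        bucket = new ArrayList<String>();
        byOutput.put(doc[1], bucket);
      }
      bucket.add(doc[2]);
    }
    System.out.println(byConnection);
  }
}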
/** Create a composite document hash key. This consists of the document id hash plus the
* connection name.
*/
protected static String makeCompositeID(String docIDHash, String connectionName)
{
return docIDHash + ":" + connectionName;
}
/** Get list of deletable document descriptions. This list will take into account
* multiple jobs that may own the same document. All documents for which a description
* is returned will be transitioned to the "beingdeleted" state. Documents which are
* not in transition and are eligible, but are owned by other jobs, will have their
* jobqueue entries deleted by this method.
*@param maxCount is the maximum number of documents to return.
*@param currentTime is the current time; some fetches do not occur until a specific time.
*@return the document descriptions for these documents.
*/
public DocumentDescription[] getNextDeletableDocuments(int maxCount, long currentTime)
throws ManifoldCFException
{
// The query will be built here, because it joins the jobs table against the jobqueue
// table.
//
// This query must only pick up documents that are not active in any job and
// which either belong to a job that's in a "delete pending" state and are in
// a "complete", "purgatory", or "pendingpurgatory" state, OR belong to a job
// that's in a "shutting down" state and are in the "purgatory" state.
//
// We are in fact more conservative in this query than we need to be; the excluded
// documents will include some false positives that merely match our criteria, which
// are designed to be fast rather than perfect. The match we make is: hashvalue
// against hashvalue, and different job id's.
//
// SELECT id,jobid,docid FROM jobqueue t0 WHERE (t0.status IN ('C','P','G') AND EXISTS(SELECT 'x' FROM
// jobs t1 WHERE t0.jobid=t1.id AND t1.status='D')
// AND NOT EXISTS(SELECT 'x' FROM jobqueue t2 WHERE t0.hashval=t2.hashval AND t0.jobid!=t2.jobid
// AND t2.status IN ('A','F','B'))
//
// Do a simple preliminary query, since the big query is currently slow, so that we don't waste time during stasis or
// ingestion.
// Moved outside of transaction, so we have no chance of locking up job status cache key for an extended period of time.
if (!jobs.deletingJobsPresent())
return new DocumentDescription[0];
long startTime = 0L;
if (Logging.perf.isDebugEnabled())
{
startTime = System.currentTimeMillis();
Logging.perf.debug("Waiting to find documents to put on the delete queue");
}
while (true)
{
long sleepAmt = 0L;
// Enter a write lock so that multiple threads can't be in here at the same time
lockManager.enterWriteLock(deleteStufferLock);
try
{
database.beginTransaction();
try
{
if (Logging.perf.isDebugEnabled())
Logging.perf.debug("After "+new Long(System.currentTimeMillis()-startTime).toString()+" ms, beginning query to look for documents to put on delete queue");
// Note: This query does not do "FOR UPDATE", because it is running under the only thread that can possibly change the document's state to "being deleted".
// If FOR UPDATE was included, deadlock happened a lot.
ArrayList list = new ArrayList();
StringBuilder sb = new StringBuilder("SELECT ");
sb.append(jobQueue.idField).append(",")
.append(jobQueue.jobIDField).append(",")
.append(jobQueue.docHashField).append(",")
.append(jobQueue.docIDField).append(",")
.append(jobQueue.failTimeField).append(",")
.append(jobQueue.failCountField).append(" FROM ").append(jobQueue.getTableName()).append(" t0 WHERE ")
.append(database.buildConjunctionClause(list,new ClauseDescription[]{
new UnitaryClause("t0."+jobQueue.statusField,jobQueue.statusToString(jobQueue.STATUS_ELIGIBLEFORDELETE))})).append(" AND ")
.append("t0.").append(jobQueue.checkTimeField).append("<=? AND ");
list.add(new Long(currentTime));
sb.append("EXISTS(SELECT 'x' FROM ").append(jobs.getTableName()).append(" t1 WHERE ")
.append(database.buildConjunctionClause(list,new ClauseDescription[]{
new UnitaryClause("t1."+jobs.statusField,jobs.statusToString(jobs.STATUS_DELETING)),
new JoinClause("t1."+jobs.idField,"t0."+jobQueue.jobIDField)})).append(") AND ");
sb.append("NOT EXISTS(SELECT 'x' FROM ").append(jobQueue.getTableName()).append(" t2 WHERE ")
.append(database.buildConjunctionClause(list,new ClauseDescription[]{
new JoinClause("t2."+jobQueue.docHashField,"t0."+jobQueue.docHashField)})).append(" AND ")
.append("t2.").append(jobQueue.statusField).append(" IN (?,?,?,?,?,?) AND ")
.append("t2.").append(jobQueue.jobIDField).append("!=t0.").append(jobQueue.jobIDField)
.append(") ");
list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVE));
list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVEPURGATORY));
list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVENEEDRESCAN));
list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVENEEDRESCANPURGATORY));
list.add(jobQueue.statusToString(jobQueue.STATUS_BEINGDELETED));
list.add(jobQueue.statusToString(jobQueue.STATUS_BEINGCLEANED));
sb.append(database.constructOffsetLimitClause(0,maxCount));
// The "checktime is null" check is for backwards compatibility
IResultSet set = database.performQuery(sb.toString(),list,null,null,maxCount,null);
if (Logging.perf.isDebugEnabled())
Logging.perf.debug("Done getting docs to delete queue after "+new Long(System.currentTimeMillis()-startTime).toString()+" ms.");
// We need to organize the returned set by connection name and output connection name, so that we can efficiently
// use getUnindexableDocumentIdentifiers.
// This is a table keyed by connection name and containing an ArrayList, which in turn contains DocumentDescription
// objects.
HashMap connectionNameMap = new HashMap();
HashMap documentIDMap = new HashMap();
int i = 0;
while (i < set.getRowCount())
{
IResultRow row = set.getRow(i);
Long jobID = (Long)row.getValue(jobQueue.jobIDField);
String documentIDHash = (String)row.getValue(jobQueue.docHashField);
String documentID = (String)row.getValue(jobQueue.docIDField);
Long failTimeValue = (Long)row.getValue(jobQueue.failTimeField);
Long failCountValue = (Long)row.getValue(jobQueue.failCountField);
// Failtime is probably not useful in this context, but we'll bring it along for completeness
long failTime;
if (failTimeValue == null)
failTime = -1L;
else
failTime = failTimeValue.longValue();
int failCount;
if (failCountValue == null)
failCount = 0;
else
failCount = (int)failCountValue.longValue();
IJobDescription jobDesc = load(jobID);
String connectionName = jobDesc.getConnectionName();
String outputConnectionName = jobDesc.getOutputConnectionName();
DocumentDescription dd = new DocumentDescription((Long)row.getValue(jobQueue.idField),
jobID,documentIDHash,documentID,failTime,failCount);
String compositeDocumentID = makeCompositeID(documentIDHash,connectionName);
documentIDMap.put(compositeDocumentID,dd);
Map y = (Map)connectionNameMap.get(connectionName);
if (y == null)
{
y = new HashMap();
connectionNameMap.put(connectionName,y);
}
ArrayList x = (ArrayList)y.get(outputConnectionName);
if (x == null)
{
// New entry needed
x = new ArrayList();
y.put(outputConnectionName,x);
}
x.add(dd);
i++;
}
// For each bin, obtain a filtered answer, and enter all answers into a hash table.
// We'll then scan the result again to look up the right descriptions for return,
// and delete the ones that are owned by more than one job.
HashMap allowedDocIds = new HashMap();
Iterator iter = connectionNameMap.keySet().iterator();
while (iter.hasNext())
{
String connectionName = (String)iter.next();
Map y = (Map)connectionNameMap.get(connectionName);
Iterator outputIter = y.keySet().iterator();
while (outputIter.hasNext())
{
String outputConnectionName = (String)outputIter.next();
ArrayList x = (ArrayList)y.get(outputConnectionName);
// Do the filter query
DocumentDescription[] descriptions = new DocumentDescription[x.size()];
int j = 0;
while (j < descriptions.length)
{
descriptions[j] = (DocumentDescription)x.get(j);
j++;
}
String[] docIDHashes = getUnindexableDocumentIdentifiers(descriptions,connectionName,outputConnectionName);
j = 0;
while (j < docIDHashes.length)
{
String docIDHash = docIDHashes[j++];
String key = makeCompositeID(docIDHash,connectionName);
allowedDocIds.put(key,docIDHash);
}
}
}
// Now, assemble a result, and change the state of the records accordingly
// First thing to do is order by document hash to reduce chances of deadlock.
String[] compositeIDArray = new String[documentIDMap.size()];
i = 0;
iter = documentIDMap.keySet().iterator();
while (iter.hasNext())
{
compositeIDArray[i++] = (String)iter.next();
}
java.util.Arrays.sort(compositeIDArray);
DocumentDescription[] rval = new DocumentDescription[allowedDocIds.size()];
int j = 0;
i = 0;
while (i < compositeIDArray.length)
{
String compositeDocumentID = compositeIDArray[i];
DocumentDescription dd = (DocumentDescription)documentIDMap.get(compositeDocumentID);
if (allowedDocIds.get(compositeDocumentID) == null)
{
// Delete this record and do NOT return it.
jobQueue.deleteRecord(dd.getID());
// What should we do about hopcount here?
// We are deleting a record which belongs to a job that is being
// cleaned up. The job itself will go away when this is done,
// and so will all the hopcount stuff pertaining to it. So, the
// treatment I've chosen here is to leave the hopcount alone and
// let the job cleanup get rid of it at the right time.
// Note: carrydown records handled in the same manner...
//carryDown.deleteRecords(dd.getJobID(),new String[]{dd.getDocumentIdentifier()});
}
else
{
// Set the record status to "being deleted" and return it
rval[j++] = dd;
jobQueue.setDeletingStatus(dd.getID());
}
i++;
}
TrackerClass.notePrecommit();
database.performCommit();
TrackerClass.noteCommit();
if (Logging.perf.isDebugEnabled())
Logging.perf.debug("Done pruning unindexable docs after "+new Long(System.currentTimeMillis()-startTime).toString()+" ms.");
return rval;
}
catch (Error e)
{
database.signalRollback();
TrackerClass.noteRollback();
throw e;
}
catch (ManifoldCFException e)
{
database.signalRollback();
TrackerClass.noteRollback();
if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT)
{
if (Logging.perf.isDebugEnabled())
Logging.perf.debug("Aborted transaction finding deleteable docs: "+e.getMessage());
sleepAmt = getRandomAmount();
continue;
}
throw e;
}
finally
{
database.endTransaction();
}
}
finally
{
lockManager.leaveWriteLock(deleteStufferLock);
sleepFor(sleepAmt);
}
}
}
/** Get a list of document identifiers that should actually be deleted from the index, from a list that
* might contain identifiers that are shared with other jobs, which are targeted to the same output connection.
* The input list is guaranteed to be smaller in size than maxInClauseCount for the database.
*@param documentIdentifiers is the set of document identifiers to consider.
*@param connectionName is the connection name for ALL the document identifiers.
*@param outputConnectionName is the output connection name for ALL the document identifiers.
*@return the set of documents which should be removed from the index.
*/
protected String[] getUnindexableDocumentIdentifiers(DocumentDescription[] documentIdentifiers, String connectionName, String outputConnectionName)
throws ManifoldCFException
{
// This is where we will count the individual document id's
HashMap countMap = new HashMap();
// First thing: Compute the set of document identifier hash values to query against
HashMap map = new HashMap();
int i = 0;
while (i < documentIdentifiers.length)
{
String hash = documentIdentifiers[i++].getDocumentIdentifierHash();
map.put(hash,hash);
countMap.put(hash,new MutableInteger(0));
}
if (map.size() == 0)
return new String[0];
// Build a query
StringBuilder sb = new StringBuilder();
ArrayList list = new ArrayList();
ArrayList docList = new ArrayList();
Iterator iter = map.keySet().iterator();
while (iter.hasNext())
{
docList.add(iter.next());
}
// Note: There is a potential race condition here. One job may be running while another is in process of
// being deleted. If they share a document, then the delete task could decide to delete the document and do so right
// after the ingestion takes place in the running job, but right before the document's status is updated
// in the job queue [which would have prevented the deletion].
// Unless a transaction is wrapped around the time ingestion is taking place (which is a very bad idea)
// we are stuck with the possibility of this condition, which will essentially lead to a document being
// missing from the index.
// One way of dealing with this is to treat "active" documents as already ingested, for the purpose of
// reference counting. Then these documents will not be deleted. The risk then becomes that the "active"
// document entry will not be completed (say, because of a restart), and thus the corresponding document
// will never be removed from the index.
//
// Instead, the only solution is to not queue a document for any activity that is inconsistent with activities
// that may already be ongoing for that document. For this reason, I have introduced a "BEING_DELETED"
// and "BEING_CLEANED" state
// for a document. These states will allow the various queries that queue up activities to avoid documents that
// are currently being processed elsewhere.
sb.append("SELECT t0.").append(jobQueue.docHashField).append(" FROM ").append(jobQueue.getTableName()).append(" t0 WHERE ")
.append(database.buildConjunctionClause(list,new ClauseDescription[]{
new MultiClause("t0."+jobQueue.docHashField,docList)})).append(" AND ")
.append("t0.").append(jobQueue.statusField).append(" IN (?,?,?,?,?) AND ");
list.add(jobQueue.statusToString(jobQueue.STATUS_PURGATORY));
list.add(jobQueue.statusToString(jobQueue.STATUS_PENDINGPURGATORY));
list.add(jobQueue.statusToString(jobQueue.STATUS_COMPLETE));
list.add(jobQueue.statusToString(jobQueue.STATUS_UNCHANGED));
list.add(jobQueue.statusToString(jobQueue.STATUS_ELIGIBLEFORDELETE));
sb.append("EXISTS(SELECT 'x' FROM ").append(jobs.getTableName()).append(" t1 WHERE ")
.append(database.buildConjunctionClause(list,new ClauseDescription[]{
new JoinClause("t1."+jobs.idField,"t0."+jobQueue.jobIDField)})).append(" AND ")
.append("t1.").append(jobs.connectionNameField).append("=? AND ")
.append("t1.").append(jobs.outputNameField).append("=?)");
list.add(connectionName);
list.add(outputConnectionName);
// Do the query, and then count the number of times each document identifier occurs.
IResultSet results = database.performQuery(sb.toString(),list,null,null);
i = 0;
while (i < results.getRowCount())
{
IResultRow row = results.getRow(i++);
String docIDHash = (String)row.getValue(jobQueue.docHashField);
MutableInteger mi = (MutableInteger)countMap.get(docIDHash);
if (mi != null)
mi.increment();
}
// Go through and count only those that have a count of 1.
int count = 0;
iter = countMap.keySet().iterator();
while (iter.hasNext())
{
String docIDHash = (String)iter.next();
MutableInteger mi = (MutableInteger)countMap.get(docIDHash);
if (mi.intValue() == 1)
count++;
}
String[] rval = new String[count];
iter = countMap.keySet().iterator();
count = 0;
while (iter.hasNext())
{
String docIDHash = (String)iter.next();
MutableInteger mi = (MutableInteger)countMap.get(docIDHash);
if (mi.intValue() == 1)
rval[count++] = docIDHash;
}
return rval;
}
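// Illustrative standalone sketch (not part of JobManager.java): the
// reference-count filter above. Only identifiers whose hash occurs exactly once
// across jobs are safe to remove from the index; shared ones are still
// referenced by some other job.
import java.util.*;

public class RefCountSketch {
  public static void main(String[] args) {
    String[] matchingRows = {"h1", "h2", "h2", "h3"};   // doc hash per matching jobqueue row
    Map<String, Integer> counts = new HashMap<String, Integer>();
    for (String hash : matchingRows) {
      Integer c = counts.get(hash);
      counts.put(hash, (c == null) ? 1 : c + 1);
    }
    List<String> unindexable = new ArrayList<String>();
    for (Map.Entry<String, Integer> entry : counts.entrySet())
      if (entry.getValue().intValue() == 1) unindexable.add(entry.getKey());
    System.out.println("safe to remove from index: " + unindexable);
  }
}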
// These methods support the reprioritization thread.
/** Get a list of already-processed documents to reprioritize. Documents in all jobs will be
* returned by this method. Up to n document descriptions will be returned.
*@param currentTime is the current time stamp for this prioritization pass. Avoid
* picking up any documents that are labeled with this timestamp or after.
*@param n is the maximum number of document descriptions desired.
*@return the document descriptions.
*/
public DocumentDescription[] getNextAlreadyProcessedReprioritizationDocuments(long currentTime, int n)
throws ManifoldCFException
{
StringBuilder sb = new StringBuilder();
ArrayList list = new ArrayList();
// The desired query is:
// SELECT docid FROM jobqueue WHERE prioritysettime < (currentTime) LIMIT (n)
sb.append("SELECT ")
.append(jobQueue.idField).append(",")
.append(jobQueue.docHashField).append(",")
.append(jobQueue.docIDField).append(",")
.append(jobQueue.jobIDField)
.append(" FROM ").append(jobQueue.getTableName()).append(" WHERE ");
sb.append(database.buildConjunctionClause(list,new ClauseDescription[]{
new MultiClause(jobQueue.statusField,new Object[]{
jobQueue.statusToString(JobQueue.STATUS_COMPLETE),
jobQueue.statusToString(JobQueue.STATUS_UNCHANGED),
jobQueue.statusToString(JobQueue.STATUS_PURGATORY)}),
new UnitaryClause(jobQueue.prioritySetField,"<",new Long(currentTime))})).append(" ");
sb.append(database.constructOffsetLimitClause(0,n));
IResultSet set = database.performQuery(sb.toString(),list,null,null,n,null);
DocumentDescription[] rval = new DocumentDescription[set.getRowCount()];
int i = 0;
while (i < set.getRowCount())
{
IResultRow row = set.getRow(i);
rval[i] = new DocumentDescription((Long)row.getValue(jobQueue.idField),
(Long)row.getValue(jobQueue.jobIDField),
(String)row.getValue(jobQueue.docHashField),
(String)row.getValue(jobQueue.docIDField));
i++;
}
return rval;
}
/** Get a list of not-yet-processed documents to reprioritize. Documents in all jobs will be
* returned by this method. Up to n document descriptions will be returned.
*@param currentTime is the current time stamp for this prioritization pass. Avoid
* picking up any documents that are labeled with this timestamp or after.
*@param n is the maximum number of document descriptions desired.
*@return the document descriptions.
*/
public DocumentDescription[] getNextNotYetProcessedReprioritizationDocuments(long currentTime, int n)
throws ManifoldCFException
{
StringBuilder sb = new StringBuilder("SELECT ");
ArrayList list = new ArrayList();
// This query MUST return only documents that are in a pending state which belong to an active job!!!
sb.append(jobQueue.idField).append(",")
.append(jobQueue.docHashField).append(",")
.append(jobQueue.docIDField).append(",")
.append(jobQueue.jobIDField)
.append(" FROM ").append(jobQueue.getTableName()).append(" t0 WHERE ")
.append(database.buildConjunctionClause(list,new ClauseDescription[]{
new MultiClause(jobQueue.statusField,new Object[]{
jobQueue.statusToString(JobQueue.STATUS_HOPCOUNTREMOVED),
jobQueue.statusToString(JobQueue.STATUS_PENDING),
jobQueue.statusToString(JobQueue.STATUS_PENDINGPURGATORY)}),
new UnitaryClause(jobQueue.prioritySetField,"<",new Long(currentTime))})).append(" AND ")
.append(jobQueue.checkActionField).append("=?").append(" AND ");
list.add(jobQueue.actionToString(JobQueue.ACTION_RESCAN));
// Per CONNECTORS-290, we need to be leaving priorities blank for jobs that aren't using them,
// so this will be changed to not include jobs where the priorities have been bashed to null.
//
// I've included ALL states that might have non-null doc priorities. This includes states
// corresponding to uninstalled connectors, since there is no transition that cleans out the
// document priorities in these states. The time during which a connector is uninstalled is
// expected to be short, because typically this state is the result of an installation procedure
// rather than willful action on the part of a user.
sb.append("EXISTS(SELECT 'x' FROM ").append(jobs.getTableName()).append(" t1 WHERE ")
.append(database.buildConjunctionClause(list,new ClauseDescription[]{
new MultiClause("t1."+jobs.statusField,new Object[]{
Jobs.statusToString(Jobs.STATUS_STARTINGUP),
Jobs.statusToString(Jobs.STATUS_STARTINGUPMINIMAL),
Jobs.statusToString(Jobs.STATUS_ACTIVE),
Jobs.statusToString(Jobs.STATUS_ACTIVESEEDING),
Jobs.statusToString(Jobs.STATUS_ACTIVE_UNINSTALLED),
Jobs.statusToString(Jobs.STATUS_ACTIVESEEDING_UNINSTALLED),
Jobs.statusToString(Jobs.STATUS_ACTIVE_NOOUTPUT),
Jobs.statusToString(Jobs.STATUS_ACTIVESEEDING_NOOUTPUT),
Jobs.statusToString(Jobs.STATUS_ACTIVE_NEITHER),
Jobs.statusToString(Jobs.STATUS_ACTIVESEEDING_NEITHER)
}),
new JoinClause("t1."+jobs.idField,"t0."+jobQueue.jobIDField)}))
.append(") ");
sb.append(database.constructOffsetLimitClause(0,n));
// Analyze jobqueue tables unconditionally, since postgresql has become much more sensitive to this in 8.3
// than it used to be. (The call is currently disabled.)
//jobQueue.unconditionallyAnalyzeTables();
IResultSet set = database.performQuery(sb.toString(),list,null,null,n,null);
DocumentDescription[] rval = new DocumentDescription[set.getRowCount()];
int i = 0;
while (i < set.getRowCount())
{
IResultRow row = set.getRow(i);
rval[i] =new DocumentDescription((Long)row.getValue(jobQueue.idField),
(Long)row.getValue(jobQueue.jobIDField),
(String)row.getValue(jobQueue.docHashField),
(String)row.getValue(jobQueue.docIDField));
i++;
}
return rval;
}
/** Save a set of document priorities. In the case where a document was eligible to have its
* priority set, but it no longer is eligible, then the provided priority will not be written.
*@param currentTime is the time in milliseconds since epoch.
*@param documentDescriptions are the document descriptions.
*@param priorities are the desired priorities.
*/
public void writeDocumentPriorities(long currentTime, DocumentDescription[] documentDescriptions, double[] priorities)
throws ManifoldCFException
{
// Retry loop - in case we get a deadlock despite our best efforts
while (true)
{
// This should be ordered by document identifier hash in order to prevent potential deadlock conditions
HashMap indexMap = new HashMap();
String[] docIDHashes = new String[documentDescriptions.length];
int i = 0;
while (i < documentDescriptions.length)
{
String documentIDHash = documentDescriptions[i].getDocumentIdentifierHash() + ":" + documentDescriptions[i].getJobID();
docIDHashes[i] = documentIDHash;
indexMap.put(documentIDHash,new Integer(i));
i++;
}
java.util.Arrays.sort(docIDHashes);
long sleepAmt = 0L;
// Start the transaction now
database.beginTransaction();
try
{
// Need to order the writes by doc id.
i = 0;
while (i < docIDHashes.length)
{
String docIDHash = docIDHashes[i];
Integer x = (Integer)indexMap.remove(docIDHash);
if (x == null)
throw new ManifoldCFException("Assertion failure: duplicate document identifier jobid/hash detected!");
int index = x.intValue();
DocumentDescription dd = documentDescriptions[index];
double priority = priorities[index];
jobQueue.writeDocPriority(currentTime,dd.getID(),priority);
if (Logging.perf.isDebugEnabled())
Logging.perf.debug("Setting document priority for '"+dd.getDocumentIdentifier()+"' to "+new Double(priority).toString()+", set time "+new Long(currentTime).toString());
i++;
}
database.performCommit();
break;
}
catch (ManifoldCFException e)
{
database.signalRollback();
if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT)
{
if (Logging.perf.isDebugEnabled())
Logging.perf.debug("Aborted transaction writing doc priorities: "+e.getMessage());
sleepAmt = getRandomAmount();
continue;
}
throw e;
}
catch (Error e)
{
database.signalRollback();
throw e;
}
finally
{
database.endTransaction();
sleepFor(sleepAmt);
}
}
}
/** Get up to the next n documents to be expired.
* This method marks the documents whose descriptions have been returned as "being processed", or active.
* The same marking is used as is used for documents that have been queued for worker threads. The model
* is thus identical.
*
*@param n is the maximum number of records desired.
*@param currentTime is the current time.
*@return the array of document descriptions to expire.
*/
public DocumentSetAndFlags getExpiredDocuments(int n, long currentTime)
throws ManifoldCFException
{
// Screening query
// Moved outside of transaction, so there's less chance of keeping jobstatus cache key tied up
// for an extended period of time.
if (!jobs.activeJobsPresent())
return new DocumentSetAndFlags(new DocumentDescription[0], new boolean[0]);
long startTime = 0L;
if (Logging.perf.isDebugEnabled())
{
startTime = System.currentTimeMillis();
Logging.perf.debug("Beginning query to look for documents to expire");
}
// Put together a query with a limit of n
// Note well: This query does not do "FOR UPDATE". The reason is that only one thread can possibly change the document's state to active.
// If FOR UPDATE was included, deadlock conditions would be common because of the complexity of this query.
ArrayList list = new ArrayList();
StringBuilder sb = new StringBuilder("SELECT ");
sb.append("t0.").append(jobQueue.idField).append(",")
.append("t0.").append(jobQueue.jobIDField).append(",")
.append("t0.").append(jobQueue.docHashField).append(",")
.append("t0.").append(jobQueue.docIDField).append(",")
.append("t0.").append(jobQueue.statusField).append(",")
.append("t0.").append(jobQueue.failTimeField).append(",")
.append("t0.").append(jobQueue.failCountField)
.append(" FROM ").append(jobQueue.getTableName()).append(" t0 WHERE ")
.append(database.buildConjunctionClause(list,new ClauseDescription[]{
new MultiClause("t0."+jobQueue.statusField,new Object[]{
jobQueue.statusToString(JobQueue.STATUS_PENDING),
jobQueue.statusToString(JobQueue.STATUS_PENDINGPURGATORY)}),
new UnitaryClause("t0."+jobQueue.checkActionField,jobQueue.actionToString(JobQueue.ACTION_REMOVE)),
new UnitaryClause("t0."+jobQueue.checkTimeField,"<=",new Long(currentTime))})).append(" AND ");
sb.append("EXISTS(SELECT 'x' FROM ").append(jobs.getTableName()).append(" t1 WHERE ")
.append(database.buildConjunctionClause(list,new ClauseDescription[]{
new MultiClause("t1."+jobs.statusField,new Object[]{
jobs.statusToString(jobs.STATUS_ACTIVE),
jobs.statusToString(jobs.STATUS_ACTIVESEEDING)}),
new JoinClause("t1."+jobs.idField,"t0."+jobQueue.jobIDField)})).append(") AND ");
sb.append("NOT EXISTS(SELECT 'x' FROM ").append(jobQueue.getTableName()).append(" t2 WHERE ")
.append(database.buildConjunctionClause(list,new ClauseDescription[]{
new JoinClause("t2."+jobQueue.docHashField,"t0."+jobQueue.docHashField)})).append(" AND ")
.append("t2.").append(jobQueue.statusField).append(" IN (?,?,?,?,?,?)").append(" AND ")
.append("t2.").append(jobQueue.jobIDField).append("!=t0.").append(jobQueue.jobIDField).append(") ");
list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVE));
list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVEPURGATORY));
list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVENEEDRESCAN));
list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVENEEDRESCANPURGATORY));
list.add(jobQueue.statusToString(jobQueue.STATUS_BEINGDELETED));
list.add(jobQueue.statusToString(jobQueue.STATUS_BEINGCLEANED));
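// The NOT EXISTS clause above excludes any document whose hash is concurrently active, being deleted,
// or being cleaned under a different job, per the BEING_DELETED/BEING_CLEANED discussion above.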
sb.append(database.constructOffsetLimitClause(0,n));
String query = sb.toString();
// Analyze jobqueue tables unconditionally, since postgresql has become much more sensitive to this in 8.3
// than it used to be. (The call is currently disabled.)
//jobQueue.unconditionallyAnalyzeTables();
int repeatCount = 0;
while (true)
{
long sleepAmt = 0L;
// Enter a write lock, so only one thread can be doing this. That makes FOR UPDATE unnecessary.
lockManager.enterWriteLock(expireStufferLock);
try
{
if (Logging.perf.isDebugEnabled())
{
repeatCount++;
Logging.perf.debug(" Attempt "+Integer.toString(repeatCount)+" to expire documents, after "+
new Long(System.currentTimeMillis() - startTime)+" ms");
}
database.beginTransaction();
try
{
IResultSet set = database.performQuery(query,list,null,null,n,null);
if (Logging.perf.isDebugEnabled())
Logging.perf.debug(" Expiring "+Integer.toString(set.getRowCount())+" documents");
// To avoid deadlock, we want to update the document id hashes in order. This means reading into a structure I can sort by docid hash,
// before updating any rows in jobqueue.
HashMap connectionNameMap = new HashMap();
HashMap documentIDMap = new HashMap();
Map statusMap = new HashMap();
int i = 0;
while (i < set.getRowCount())
{
IResultRow row = set.getRow(i);
Long jobID = (Long)row.getValue(jobQueue.jobIDField);
String documentIDHash = (String)row.getValue(jobQueue.docHashField);
String documentID = (String)row.getValue(jobQueue.docIDField);
int status = jobQueue.stringToStatus(row.getValue(jobQueue.statusField).toString());
Long failTimeValue = (Long)row.getValue(jobQueue.failTimeField);
Long failCountValue = (Long)row.getValue(jobQueue.failCountField);
// Failtime is probably not useful in this context, but we'll bring it along for completeness
long failTime;
if (failTimeValue == null)
failTime = -1L;
else
failTime = failTimeValue.longValue();
int failCount;
if (failCountValue == null)
failCount = 0;
else
failCount = (int)failCountValue.longValue();
IJobDescription jobDesc = load(jobID);
String connectionName = jobDesc.getConnectionName();
String outputConnectionName = jobDesc.getOutputConnectionName();
DocumentDescription dd = new DocumentDescription((Long)row.getValue(jobQueue.idField),
jobID,documentIDHash,documentID,failTime,failCount);
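// Key by document hash plus connection name, so identical hashes from different connections remain distinct.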
String compositeDocumentID = makeCompositeID(documentIDHash,connectionName);
documentIDMap.put(compositeDocumentID,dd);
statusMap.put(compositeDocumentID,new Integer(status));
Map y = (Map)connectionNameMap.get(connectionName);
if (y == null)
{
y = new HashMap();
connectionNameMap.put(connectionName,y);
}
ArrayList x = (ArrayList)y.get(outputConnectionName);
if (x == null)
{
// New entry needed
x = new ArrayList();
y.put(outputConnectionName,x);
}
x.add(dd);
i++;
}
// For each bin, obtain a filtered answer, and enter all answers into a hash table.
// We'll then scan the result again to look up the right descriptions for return,
// and flag which ones may safely be removed from the index (those not owned by multiple jobs).
HashMap allowedDocIds = new HashMap();
Iterator iter = connectionNameMap.keySet().iterator();
while (iter.hasNext())
{
String connectionName = (String)iter.next();
Map y = (Map)connectionNameMap.get(connectionName);
Iterator outputIter = y.keySet().iterator();
while (outputIter.hasNext())
{
String outputConnectionName = (String)outputIter.next();
ArrayList x = (ArrayList)y.get(outputConnectionName);
// Do the filter query
DocumentDescription[] descriptions = new DocumentDescription[x.size()];
int j = 0;
while (j < descriptions.length)
{
descriptions[j] = (DocumentDescription)x.get(j);
j++;
}
String[] docIDHashes = getUnindexableDocumentIdentifiers(descriptions,connectionName,outputConnectionName);
j = 0;
while (j < docIDHashes.length)
{
String docIDHash = docIDHashes[j++];
String key = makeCompositeID(docIDHash,connectionName);
allowedDocIds.put(key,docIDHash);
}
}
}
// Now, assemble a result, and change the state of the records accordingly
// First thing to do is order by document hash, so we reduce the risk of deadlock.
String[] compositeIDArray = new String[documentIDMap.size()];
i = 0;
iter = documentIDMap.keySet().iterator();
while (iter.hasNext())
{
compositeIDArray[i++] = (String)iter.next();
}
java.util.Arrays.sort(compositeIDArray);
DocumentDescription[] rval = new DocumentDescription[documentIDMap.size()];
boolean[] rvalBoolean = new boolean[documentIDMap.size()];
i = 0;
while (i < compositeIDArray.length)
{
String compositeDocID = compositeIDArray[i];
DocumentDescription dd = (DocumentDescription)documentIDMap.get(compositeDocID);
// Determine whether we can delete it from the index or not
rvalBoolean[i] = (allowedDocIds.get(compositeDocID) != null);
// Mark the record as being processed (active), and return it
rval[i++] = dd;
jobQueue.updateActiveRecord(dd.getID(),((Integer)statusMap.get(compositeDocID)).intValue());
}
TrackerClass.notePrecommit();
database.performCommit();
TrackerClass.noteCommit();
return new DocumentSetAndFlags(rval, rvalBoolean);
}
catch (ManifoldCFException e)
{
database.signalRollback();
TrackerClass.noteRollback();
if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT)
{
if (Logging.perf.isDebugEnabled())
Logging.perf.debug("Aborted transaction finding docs to expire: "+e.getMessage());
sleepAmt = getRandomAmount();
continue;
}
throw e;
}
catch (Error e)
{
database.signalRollback();
TrackerClass.noteRollback();
throw e;
}
finally
{
database.endTransaction();
}
}
finally
{
lockManager.leaveWriteLock(expireStufferLock);
sleepFor(sleepAmt);
}
}
}
// This method supports the "queue stuffer" thread
/** Get up to the next n document(s) to be fetched and processed.
* This fetch returns records that contain the document identifier, plus all instructions
* pertaining to the document's handling (e.g. whether it should be refetched if the version
* has not changed).
* This method also marks the documents whose descriptions have been returned as "being processed".
*@param n is the maximum number of records desired.
*@param currentTime is the current time; some fetches do not occur until a specific time.
*@param interval is the number of milliseconds that this set of documents should represent (for throttling).
*@param blockingDocuments is the place to record documents that were encountered, are eligible for reprioritization,
* but could not be queued due to throttling considerations.
*@param statistics are the current performance statistics per connection, which are used to balance the queue stuffing
* so that individual connections are not overwhelmed.
*@param scanRecord retains the bins from all documents encountered from the query, even those that were skipped due
* to being overcommitted.
*@return the array of document descriptions to fetch and process.
*/
public DocumentDescription[] getNextDocuments(int n, long currentTime, long interval,
BlockingDocuments blockingDocuments, PerformanceStatistics statistics,
DepthStatistics scanRecord)
throws ManifoldCFException
{
// NOTE WELL: Jobs that are throttled must control the number of documents that are fetched in
// a given interval. Therefore, the returned result has the following constraints on it:
// 1) There must be no more than n documents returned total;
// 2) For any given job that is throttled, the total number of documents returned must be
// consistent with the time interval provided.
// In general, this requires the database layer to perform fairly advanced filtering on
// the result, far in excess of a simple count. An implementation of an interface is therefore
// going to need to be passed into the performQuery() operation, which prunes the resultset
// as it is being read into memory. That's a new feature that will need to be added to the
// database layer.
// Screening query
// Moved outside of transaction, so there's less chance of keeping jobstatus cache key tied up
// for an extended period of time.
if (!jobs.activeJobsPresent())
return new DocumentDescription[0];
long startTime = 0L;
if (Logging.perf.isDebugEnabled())
{
startTime = System.currentTimeMillis();
Logging.perf.debug("Waiting to find documents to queue");
}
// Below there used to be one large transaction, with multiple read sections and multiple write sections.
// As part of reducing the chance of postgresql encountering deadlock conditions, I wanted to break this
// transaction up. However, the transaction depended for its correctness in throttling on making sure
// that the throttles that were built were based on the same active jobs that the subsequent queries
// that did the stuffing relied upon. This made reorganization impossible until I realized that with
// Postgresql's way of doing transaction isolation this was going to happen anyway, so I needed a more
// robust solution.
//
// Specifically, I chose to change the way documents were queued so that only documents from properly
// throttled jobs could be queued. That meant I needed to add stuff to the ThrottleLimit class to track
// the very knowledge of an active job. This had the additional benefit of meaning there was no chance of
// a query occurring from inside a resultset filter.
//
// But, after I did this, it was no longer necessary to have such a large transaction either.
// Anything older than 10 minutes ago is considered eligible for reprioritization.
long prioritizationTime = currentTime - 60000L * 10L;
ThrottleLimit vList = new ThrottleLimit(n,prioritizationTime);
IResultSet jobconnections = jobs.getActiveJobConnections();
HashMap connectionSet = new HashMap();
int i = 0;
while (i < jobconnections.getRowCount())
{
IResultRow row = jobconnections.getRow(i++);
Long jobid = (Long)row.getValue("jobid");
String connectionName = (String)row.getValue("connectionname");
vList.addJob(jobid,connectionName);
connectionSet.put(connectionName,connectionName);
}
// Find the active connection names. We'll load these, and then get throttling info
// from each one.
String[] activeConnectionNames = new String[connectionSet.size()];
Iterator iter = connectionSet.keySet().iterator();
i = 0;
while (iter.hasNext())
{
activeConnectionNames[i++] = (String)iter.next();
}
IRepositoryConnection[] connections = connectionMgr.loadMultiple(activeConnectionNames);
// Accumulate a sum of the max_connection_count * avg_connection_rate values, so we can calculate the appropriate adjustment
// factor and set the connection limits.
HashMap rawFetchCounts = new HashMap();
double rawFetchCountTotal = 0.0;
i = 0;
while (i < connections.length)
{
IRepositoryConnection connection = connections[i++];
String connectionName = connection.getName();
int maxConnections = connection.getMaxConnections();
double avgFetchRate = statistics.calculateConnectionFetchRate(connectionName);
double weightedRawFetchCount = avgFetchRate * (double)maxConnections;
// Keep the avg rate for later use, since it may get updated before next time we need it.
rawFetchCounts.put(connectionName,new Double(weightedRawFetchCount));
rawFetchCountTotal += weightedRawFetchCount;
}
// Calculate an adjustment factor
double fetchCountAdjustmentFactor = ((double)n) / rawFetchCountTotal;
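// For example (hypothetical numbers): with n = 100 and rawFetchCountTotal = 400.0, the adjustment
// factor is 0.25, so each connection's weighted raw fetch count is scaled down to a quarter.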
// For each job, we must amortize the maximum number of fetches per ms to the actual interval,
// and also randomly select an extra fetch based on the fractional probability. (This latter is
// necessary for the case where the maximum fetch rate is specified to be pretty low.)
//
i = 0;
while (i < connections.length)
{
IRepositoryConnection connection = connections[i++];
String connectionName = connection.getName();
// Check if throttled...
String[] throttles = connection.getThrottles();
int k = 0;
while (k < throttles.length)
{
// The key is the regexp value itself
String throttle = throttles[k++];
float throttleValue = connection.getThrottleValue(throttle);
// For the given connection, set the fetch limit per bin. This is calculated using the time interval
// and the desired fetch rate. The fractional remainder is used to conditionally provide an "extra fetch"
// on a weighted random basis.
//
// In the future, the connection may specify tuples which pair a regexp describing a set of bins against
// a fetch rate. In that case, each fetch rate would need to be turned into a precise maximum
// count.
double fetchesPerTimeInterval = (double)throttleValue * (double)interval;
// Actual amount will be the integer value of this, plus an additional 1 if the random number aligns
int fetches = (int)fetchesPerTimeInterval;
fetchesPerTimeInterval -= (double)fetches;
if (random.nextDouble() <= fetchesPerTimeInterval)
fetches++;
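// Worked example (hypothetical numbers): a throttle of 0.0025 fetches/ms over a 1000 ms interval gives
// fetchesPerTimeInterval = 2.5, i.e. two guaranteed fetches plus a 50% chance of a third.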
// Save the limit in the ThrottleLimit structure
vList.addLimit(connectionName,throttle,fetches);
}
// For the overall connection, we also have a limit which is based on the number of connections there are actually available.
Double weightedRawFetchCount = (Double)rawFetchCounts.get(connectionName);
double adjustedFetchCount = weightedRawFetchCount.doubleValue() * fetchCountAdjustmentFactor;
// Note well: Queuing starvation that results from there being very few available documents for high-priority connections is dealt with here by simply allowing
// the stuffer thread to keep queuing documents until there are enough. This will be pretty inefficient if there's an active connection that is fast and has lots
// of available connection handles while the bulk of the activity is on slow, highly handle-limited connections, but I honestly can't think of a better way at the moment.
// One good way to correct a bit for this problem is to set a higher document count floor for each connection - say 5 documents - then we won't loop as much.
//
// Be off in the higher direction rather than the lower; this also prohibits zero values and sets a minimum.
int fetchCount = ((int)adjustedFetchCount) + 5;
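// For example (hypothetical numbers): an adjustedFetchCount of 12.7 yields a connection limit of 12 + 5 = 17.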
vList.setConnectionLimit(connectionName,fetchCount);
}
if (Logging.perf.isDebugEnabled())
Logging.perf.debug("After "+new Long(System.currentTimeMillis()-startTime).toString()+" ms, beginning query to look for documents to queue");
// System.out.println("Done building throttle structure");
// Locate records.
// Note that we do NOT want to get everything there is to know about the job
// using this query, since the file specification may be large and expensive
// to parse. We will load a (cached) copy of the job description for that purpose.
//
// NOTE: This query deliberately excludes documents which may be being processed by another job.
// (It actually excludes a bit more than that, because the exact query is impossible to write given
// the fact that document id's cannot be compared.) These are documents where there is ANOTHER
// document entry with the same hash value, a different job id, and a status which is either "active",
// "activepurgatory", or "beingdeleted". (It does not check whether the jobs have the same connection or
// whether the document id's are in fact the same, and therefore may temporarily block legitimate document
// activity under rare circumstances.)
//
// The query I want is:
// SELECT jobid,docid,status FROM jobqueue t0 WHERE status IN ('P','G') AND checktime <=xxx
// AND EXISTS(SELECT 'x' FROM
// jobs t1 WHERE t0.jobid=t1.id AND t1.status='A')
// AND NOT EXISTS(SELECT 'x' FROM jobqueue t2 WHERE t0.hashval=t2.hashval AND t0.jobid!=t2.jobid
// AND t2.status IN ('A','F','D'))
// ORDER BY docpriority ASC LIMIT xxx
//
// NOTE WELL: The above query did just fine until adaptive recrawling was seriously tried. Then, because every
// document in a job was still active, it failed miserably, actually causing Postgresql to stop responding at
// one point. Why? Well, the key thing is the sort criteria - there just isn't any way to sort 1M documents
// without working with a monster resultset.
//
// I introduced a new index as a result - based solely on docpriority - and postgresql now correctly uses that index
// to pull its results in an ordered fashion.
//
// Another subtlety is that I *must* mark the documents active as I find them, so that they do not
// have any chance of getting returned twice.
// Accumulate the answers here
ArrayList answers = new ArrayList();
// The current time value
Long currentTimeValue = new Long(currentTime);
// Always analyze jobqueue before this query; otherwise stuffing may get a bad plan, interfering with performance.
// This turned out to be needed in postgresql 8.3, even though 8.2 worked fine. (The call is currently disabled.)
//jobQueue.unconditionallyAnalyzeTables();
// Loop through priority values
int currentPriority = 1;
boolean isDone = false;
while (!isDone && currentPriority <= 10)
{
if (jobs.hasPriorityJobs(currentPriority))
{
Long currentPriorityValue = new Long((long)currentPriority);
fetchAndProcessDocuments(answers,currentTimeValue,currentPriorityValue,vList,connections);
isDone = !vList.checkContinue();
}
currentPriority++;
}
// Assert the blocking documents we discovered
vList.tallyBlockingDocuments(blockingDocuments);
// Convert the saved answers to an array
DocumentDescription[] rval = new DocumentDescription[answers.size()];
i = 0;
while (i < rval.length)
{
rval[i] = (DocumentDescription)answers.get(i);
i++;
}
// After we're done pulling stuff from the queue, find the eligible row with the best priority on the queue, and save the bins for assessment.
// This is done to decide what the "floor" bincount should be - the idea being that it is wrong to assign priorities for new documents which are
// higher than the level currently being dequeued.
//
// The complicating factor here is that there are indeed many potential *classes* of documents, each of which might have its own current
// document priority level. For example, documents could be classed by job, which might make sense because there is a possibility that two jobs'
// job priorities may differ. Also, because of document fetch scheduling, each time frame may represent a class in its own right as well.
// These classes would have to be associated with independent bin counts, if we were to make any use of them. Then, it would be also necessary
// to know what classes a document belonged to in order to be able to calculate its priority.
//
// An alternative way to proceed is to just have ONE class, and document priorities then get assigned without regard to job, queuing time, etc.
// That's the current reality. The code below works in that model, knowing full well that it is an approximation to an ideal.
// Find the one row from a live job that has the best document priority, which is available within the current time window.
// Note that if there is NO such document, it means we were able to queue all eligible documents, and thus prioritization is probably not even
// germane at the moment.
StringBuilder sb = new StringBuilder("SELECT ");
ArrayList list = new ArrayList();
sb.append(jobQueue.docPriorityField).append(",").append(jobQueue.jobIDField).append(",")
.append(jobQueue.docHashField).append(",").append(jobQueue.docIDField)
.append(" FROM ").append(jobQueue.getTableName())
.append(" t0 ").append(jobQueue.getGetNextDocumentsIndexHint()).append(" WHERE ");
sb.append(database.buildConjunctionClause(list,new ClauseDescription[]{
//new UnitaryClause(jobQueue.docPriorityField,">=",new Long(0L)),
new MultiClause(jobQueue.statusField,
new Object[]{jobQueue.statusToString(JobQueue.STATUS_PENDING),
jobQueue.statusToString(JobQueue.STATUS_PENDINGPURGATORY)}),
new UnitaryClause(jobQueue.checkActionField,"=",jobQueue.actionToString(JobQueue.ACTION_RESCAN)),
new UnitaryClause(jobQueue.checkTimeField,"<=",currentTimeValue)})).append(" AND ");
sb.append("EXISTS(SELECT 'x' FROM ").append(jobs.getTableName()).append(" t1 WHERE ")
.append(database.buildConjunctionClause(list,new ClauseDescription[]{
new MultiClause("t1."+jobs.statusField,new Object[]{
Jobs.statusToString(Jobs.STATUS_ACTIVE),
Jobs.statusToString(Jobs.STATUS_ACTIVESEEDING)}),
new JoinClause("t1."+jobs.idField,"t0."+jobQueue.jobIDField)}))
.append(") ");
sb.append(" ").append(database.constructIndexOrderByClause(new String[]{
jobQueue.docPriorityField, jobQueue.statusField, jobQueue.checkActionField, jobQueue.checkTimeField},
true)).append(" ")
.append(database.constructOffsetLimitClause(0,1,true));
IResultSet set = database.performQuery(sb.toString(),list,null,null,1,null);
if (set.getRowCount() > 0)
{
IResultRow row = set.getRow(0);
Double docPriority = (Double)row.getValue(jobQueue.docPriorityField);
if (docPriority != null && docPriority.doubleValue() < jobQueue.noDocPriorityValue)
scanRecord.addBins(docPriority);
}
return rval;
}
/** Fetch and process documents matching the passed-in criteria */
protected void fetchAndProcessDocuments(ArrayList answers, Long currentTimeValue, Long currentPriorityValue,
ThrottleLimit vList, IRepositoryConnection[] connections)
throws ManifoldCFException
{
// Note well: This query does not do "FOR UPDATE". The reason is that only one thread can possibly change the document's state to active.
// When FOR UPDATE was included, deadlock conditions were common because of the complexity of this query.
// So, instead, as part of CONNECTORS-781, I've introduced a write lock for the pertinent section.
ArrayList list = new ArrayList();
StringBuilder sb = new StringBuilder("SELECT t0.");
sb.append(jobQueue.idField).append(",t0.");
if (Logging.scheduling.isDebugEnabled())
sb.append(jobQueue.docPriorityField).append(",t0.");
sb.append(jobQueue.jobIDField).append(",t0.")
.append(jobQueue.docHashField).append(",t0.")
.append(jobQueue.docIDField).append(",t0.")
.append(jobQueue.statusField).append(",t0.")
.append(jobQueue.failTimeField).append(",t0.")
.append(jobQueue.failCountField).append(",t0.")
.append(jobQueue.prioritySetField).append(" FROM ").append(jobQueue.getTableName())
.append(" t0 ").append(jobQueue.getGetNextDocumentsIndexHint()).append(" WHERE ");
sb.append(database.buildConjunctionClause(list,new ClauseDescription[]{
//new UnitaryClause("t0."+jobQueue.docPriorityField,">=",new Long(0L)),
new MultiClause("t0."+jobQueue.statusField,new Object[]{
jobQueue.statusToString(JobQueue.STATUS_PENDING),
jobQueue.statusToString(JobQueue.STATUS_PENDINGPURGATORY)}),
new UnitaryClause("t0."+jobQueue.checkActionField,"=",jobQueue.actionToString(JobQueue.ACTION_RESCAN)),
new UnitaryClause("t0."+jobQueue.checkTimeField,"<=",currentTimeValue)})).append(" AND ");
sb.append("EXISTS(SELECT 'x' FROM ").append(jobs.getTableName()).append(" t1 WHERE ")
.append(database.buildConjunctionClause(list,new ClauseDescription[]{
new MultiClause("t1."+jobs.statusField,new Object[]{
Jobs.statusToString(jobs.STATUS_ACTIVE),
Jobs.statusToString(jobs.STATUS_ACTIVESEEDING)}),
new JoinClause("t1."+jobs.idField,"t0."+jobQueue.jobIDField),
new UnitaryClause("t1."+jobs.priorityField,currentPriorityValue)}))
.append(") AND ");
sb.append("NOT EXISTS(SELECT 'x' FROM ").append(jobQueue.getTableName()).append(" t2 WHERE ")
.append(database.buildConjunctionClause(list,new ClauseDescription[]{
new JoinClause("t2."+jobQueue.docHashField,"t0."+jobQueue.docHashField)})).append(" AND ")
.append("t2.").append(jobQueue.statusField).append(" IN (?,?,?,?,?,?) AND ")
.append("t2.").append(jobQueue.jobIDField).append("!=t0.").append(jobQueue.jobIDField)
.append(") AND ");
list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVE));
list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVEPURGATORY));
list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVENEEDRESCAN));
list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVENEEDRESCANPURGATORY));
list.add(jobQueue.statusToString(jobQueue.STATUS_BEINGDELETED));
list.add(jobQueue.statusToString(jobQueue.STATUS_BEINGCLEANED));
// Prerequisite event clause: AND NOT EXISTS(SELECT 'x' FROM prereqevents t3,events t4 WHERE t3.ownerid=t0.id AND t3.name=t4.name)
sb.append("NOT EXISTS(SELECT 'x' FROM ").append(jobQueue.prereqEventManager.getTableName()).append(" t3,").append(eventManager.getTableName()).append(" t4 WHERE t0.")
.append(jobQueue.idField).append("=t3.").append(jobQueue.prereqEventManager.ownerField).append(" AND t3.")
.append(jobQueue.prereqEventManager.eventNameField).append("=t4.").append(eventManager.eventNameField)
.append(")");
sb.append(" ").append(database.constructIndexOrderByClause(new String[]{
"t0."+jobQueue.docPriorityField, "t0."+jobQueue.statusField, "t0."+jobQueue.checkActionField, "t0."+jobQueue.checkTimeField},
true)).append(" ");
// Before entering the transaction, we must provide the throttlelimit object with all the connector
// instances it could possibly need. The purpose for doing this is to prevent a deadlock where
// connector starvation causes database lockup.
//
// The preallocation of multiple connector instances is certainly a worry. If any other part
// of the code allocates multiple connector instances also, the potential exists for this to cause
// deadlock all by itself. I've therefore built a "grab multiple" and a "release multiple"
// at the connector factory level to make sure these requests are properly ordered.
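// Assemble the parallel arrays that grabMultiple() consumes; the ordering keys allow the factory
// to acquire the connector instances in a consistent order, per the deadlock discussion above.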
String[] orderingKeys = new String[connections.length];
String[] classNames = new String[connections.length];
ConfigParams[] configParams = new ConfigParams[connections.length];
int[] maxConnections = new int[connections.length];
int k = 0;
while (k < connections.length)
{
IRepositoryConnection connection = connections[k];
orderingKeys[k] = connection.getName();
classNames[k] = connection.getClassName();
configParams[k] = connection.getConfigParams();
maxConnections[k] = connection.getMaxConnections();
k++;
}
// Never sleep with a resource locked!
while (true)
{
long sleepAmt = 0L;
// Write lock ensures that only one thread cluster-wide can be doing this at a given time, so FOR UPDATE is unneeded.
lockManager.enterWriteLock(stufferLock);
try
{
IRepositoryConnector[] connectors = RepositoryConnectorFactory.grabMultiple(threadContext,orderingKeys,classNames,configParams,maxConnections);
try
{
// Hand the connectors off to the ThrottleLimit instance
k = 0;
while (k < connections.length)
{
vList.addConnectionName(connections[k].getName(),connectors[k]);
k++;
}
// Now we can tack the limit onto the query. Before this point, the remaining-documents count would not yet be meaningful.
int limitValue = vList.getRemainingDocuments();
sb.append(database.constructOffsetLimitClause(0,limitValue,true));
if (Logging.perf.isDebugEnabled())
{
Logging.perf.debug("Queuing documents from time "+currentTimeValue.toString()+" job priority "+currentPriorityValue.toString()+
" (up to "+Integer.toString(vList.getRemainingDocuments())+" documents)");
}
database.beginTransaction();
try
{
IResultSet set = database.performQuery(sb.toString(),list,null,null,-1,vList);
if (Logging.perf.isDebugEnabled())
Logging.perf.debug(" Queuing "+Integer.toString(set.getRowCount())+" documents");
// To avoid deadlock, we want to update the document id hashes in order. This means reading into a structure I can sort by docid hash,
// before updating any rows in jobqueue.
String[] docIDHashes = new String[set.getRowCount()];
Map storageMap = new HashMap();
Map statusMap = new HashMap();
int i = 0;
while (i < set.getRowCount())
{
IResultRow row = set.getRow(i);
Long id = (Long)row.getValue(jobQueue.idField);
Long jobID = (Long)row.getValue(jobQueue.jobIDField);
String docIDHash = (String)row.getValue(jobQueue.docHashField);
String docID = (String)row.getValue(jobQueue.docIDField);
int status = jobQueue.stringToStatus(row.getValue(jobQueue.statusField).toString());
Long failTimeValue = (Long)row.getValue(jobQueue.failTimeField);
Long failCountValue = (Long)row.getValue(jobQueue.failCountField);
long failTime;
if (failTimeValue == null)
failTime = -1L;
else
failTime = failTimeValue.longValue();
int failCount;
if (failCountValue == null)
failCount = -1;
else
failCount = (int)failCountValue.longValue();
DocumentDescription dd = new DocumentDescription(id,jobID,docIDHash,docID,failTime,failCount);
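// Composite sort key: document hash plus job id, since the same hash may be queued under multiple jobs.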
docIDHashes[i] = docIDHash + ":" + jobID;
storageMap.put(docIDHashes[i],dd);
statusMap.put(docIDHashes[i],new Integer(status));
if (Logging.scheduling.isDebugEnabled())
{
Double docPriority = (Double)row.getValue(jobQueue.docPriorityField);
Logging.scheduling.debug("Stuffing document '"+docID+"' that has priority "+docPriority.toString()+" onto active list");
}
i++;
}
// No duplicates are possible here
java.util.Arrays.sort(docIDHashes);
i = 0;
while (i < docIDHashes.length)
{
String docIDHash = docIDHashes[i];
DocumentDescription dd = (DocumentDescription)storageMap.get(docIDHash);
Long id = dd.getID();
int status = ((Integer)statusMap.get(docIDHash)).intValue();
// Set status to "ACTIVE".
jobQueue.updateActiveRecord(id,status);
answers.add(dd);
i++;
}
TrackerClass.notePrecommit();
database.performCommit();
TrackerClass.noteCommit();
break;
}
catch (ManifoldCFException e)
{
database.signalRollback();
if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT)
{
if (Logging.perf.isDebugEnabled())
Logging.perf.debug("Aborted transaction finding docs to queue: "+e.getMessage());
sleepAmt = getRandomAmount();
continue;
}
throw e;
}
catch (Error e)
{
database.signalRollback();
throw e;
}
finally
{
database.endTransaction();
}
}
finally
{
RepositoryConnectorFactory.releaseMultiple(connectors);
}
}
finally
{
lockManager.leaveWriteLock(stufferLock);
sleepFor(sleepAmt);
}
}
}
// These methods support the individual fetch/process threads.
/** Verify that a specific job is indeed still active. This is used to permit abort or pause to be relatively speedy.
* The query done within MUST be cached in order to not cause undue performance degradation.
*@param jobID is the job identifier.
*@return true if the job is in one of the "active" states.
*/
public boolean checkJobActive(Long jobID)
throws ManifoldCFException
{
return jobs.checkJobActive(jobID);
}
/** Verify if a job is still processing documents, or no longer has any outstanding active documents */
public boolean checkJobBusy(Long jobID)
throws ManifoldCFException
{
return jobQueue.checkJobBusy(jobID);
}
/** Note completion of document processing by a job thread of a document.
* This method causes the state of the document to be marked as "completed".
*@param documentDescriptions are the description objects for the documents that were processed.
*/
public void markDocumentCompletedMultiple(DocumentDescription[] documentDescriptions)
throws ManifoldCFException
{
// Before we can change a document status, we need to know the *current* status. Therefore, a SELECT xxx FOR UPDATE/UPDATE
// transaction is needed in order to complete these documents correctly.
//
// Since we are therefore setting row locks on the jobqueue table, we need to work to avoid unnecessary deadlocking. To do that, we have to
// lock rows in document id hash order!! Luckily, the DocumentDescription objects have a document identifier buried within, which we can use to
// order the "select for update" operations appropriately.
//
HashMap indexMap = new HashMap();
String[] docIDHashes = new String[documentDescriptions.length];
int i = 0;
while (i < documentDescriptions.length)
{
String documentIDHash = documentDescriptions[i].getDocumentIdentifierHash() + ":" + documentDescriptions[i].getJobID();
docIDHashes[i] = documentIDHash;
indexMap.put(documentIDHash,new Integer(i));
i++;
}
java.util.Arrays.sort(docIDHashes);
// Retry loop - in case we get a deadlock despite our best efforts
while (true)
{
long sleepAmt = 0L;
// Start the transaction now
database.beginTransaction();
try
{
// Do one row at a time, to avoid deadlocking things
i = 0;
while (i < docIDHashes.length)
{
String docIDHash = docIDHashes[i];
// Get the DocumentDescription object
DocumentDescription dd = documentDescriptions[((Integer)indexMap.get(docIDHash)).intValue()];
// Query for the status
ArrayList list = new ArrayList();
String query = database.buildConjunctionClause(list,new ClauseDescription[]{
new UnitaryClause(jobQueue.idField,dd.getID())});
TrackerClass.notePreread(dd.getID());
IResultSet set = database.performQuery("SELECT "+jobQueue.statusField+" FROM "+jobQueue.getTableName()+" WHERE "+
query+" FOR UPDATE",list,null,null);
TrackerClass.noteRead(dd.getID());
if (set.getRowCount() > 0)
{
IResultRow row = set.getRow(0);
// Grab the status
int status = jobQueue.stringToStatus((String)row.getValue(jobQueue.statusField));
// Update the jobqueue table
jobQueue.updateCompletedRecord(dd.getID(),status);
}
i++;
}
TrackerClass.notePrecommit();
database.performCommit();
TrackerClass.noteCommit();
break;
}
catch (ManifoldCFException e)
{
database.signalRollback();
TrackerClass.noteRollback();
if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT)
{
if (Logging.perf.isDebugEnabled())
Logging.perf.debug("Aborted transaction marking completed "+Integer.toString(docIDHashes.length)+
" docs: "+e.getMessage());
sleepAmt = getRandomAmount();
continue;
}
throw e;
}
catch (Error e)
{
database.signalRollback();
TrackerClass.noteRollback();
throw e;
}
finally
{
database.endTransaction();
sleepFor(sleepAmt);
}
}
}
/** Note completion of document processing by a job thread of a document.
* This method causes the state of the document to be marked as "completed".
*@param documentDescription is the description object for the document that was processed.
*/
public void markDocumentCompleted(DocumentDescription documentDescription)
throws ManifoldCFException
{
markDocumentCompletedMultiple(new DocumentDescription[]{documentDescription});
}
/** Delete from queue as a result of processing of an active document.
* The document is expected to be in one of the active states: ACTIVE, ACTIVESEEDING,
* ACTIVENEEDSRESCAN, ACTIVESEEDINGNEEDSRESCAN. The RESCAN variants are interpreted
* as meaning that the document should not be deleted, but should instead be popped back on the queue for
* a repeat processing attempt.
*@param documentDescriptions are the set of description objects for the documents that were processed.
*@param hopcountMethod describes how to handle deletions for hopcount purposes.
*@return the set of documents for which carrydown data was changed by this operation. These documents are likely
* to be requeued as a result of the change.
*/
public DocumentDescription[] markDocumentDeletedMultiple(Long jobID, String[] legalLinkTypes, DocumentDescription[] documentDescriptions,
int hopcountMethod)
throws ManifoldCFException
{
// It's no longer an issue to have to deal with documents being conditionally deleted; that's been
// taken over by the hopcountremoval method below. So just use the simple 'delete' functionality.
return doDeleteMultiple(jobID,legalLinkTypes,documentDescriptions,hopcountMethod);
}
/** Delete from queue as a result of processing of an active document.
* The document is expected to be in one of the active states: ACTIVE, ACTIVESEEDING,
* ACTIVENEEDSRESCAN, ACTIVESEEDINGNEEDSRESCAN. The RESCAN variants are interpreted
* as meaning that the document should not be deleted, but should instead be popped back on the queue for
* a repeat processing attempt.
*@param documentDescription is the description object for the document that was processed.
*@param hopcountMethod describes how to handle deletions for hopcount purposes.
*@return the set of documents for which carrydown data was changed by this operation. These documents are likely
* to be requeued as a result of the change.
*/
public DocumentDescription[] markDocumentDeleted(Long jobID, String[] legalLinkTypes, DocumentDescription documentDescription,
int hopcountMethod)
throws ManifoldCFException
{
return markDocumentDeletedMultiple(jobID,legalLinkTypes,new DocumentDescription[]{documentDescription},hopcountMethod);
}
/** Mark hopcount removal from queue as a result of processing of an active document.
* The document is expected to be in one of the active states: ACTIVE, ACTIVESEEDING,
* ACTIVENEEDSRESCAN, ACTIVESEEDINGNEEDSRESCAN. The RESCAN variants are interpreted
* as meaning that the document should not be marked as removed, but should instead be popped back on the queue for
* a repeat processing attempt.
*@param documentDescriptions are the set of description objects for the documents that were processed.
*@param hopcountMethod describes how to handle deletions for hopcount purposes.
*@return the set of documents for which carrydown data was changed by this operation. These documents are likely
* to be requeued as a result of the change.
*/
public DocumentDescription[] markDocumentHopcountRemovalMultiple(Long jobID, String[] legalLinkTypes, DocumentDescription[] documentDescriptions,
int hopcountMethod)
throws ManifoldCFException
{
// For each record, we're going to have to choose between marking it as "hopcount removed", and marking
// it for rescan. So the basic flow will involve changing a document's status.
// Before we can change a document status, we need to know the *current* status. Therefore, a SELECT xxx FOR UPDATE/UPDATE
// transaction is needed in order to complete these documents correctly.
//
// Since we are therefore setting row locks on the jobqueue table, we need to work to avoid unnecessary deadlocking. To do that, we have to
// lock rows in document id hash order!! Luckily, the DocumentDescription objects have a document identifier buried within, which we can use to
// order the "select for update" operations appropriately.
//
HashMap indexMap = new HashMap();
String[] docIDHashes = new String[documentDescriptions.length];
int i = 0;
while (i < documentDescriptions.length)
{
String documentIDHash = documentDescriptions[i].getDocumentIdentifierHash() + ":" + documentDescriptions[i].getJobID();
docIDHashes[i] = documentIDHash;
indexMap.put(documentIDHash,new Integer(i));
i++;
}
java.util.Arrays.sort(docIDHashes);
// Retry loop - in case we get a deadlock despite our best efforts
while (true)
{
long sleepAmt = 0L;
database.beginTransaction(database.TRANSACTION_SERIALIZED);
try
{
// Do one row at a time, to avoid deadlocking things
List<String> deleteList = new ArrayList<String>();
i = 0;
while (i < docIDHashes.length)
{
String docIDHash = docIDHashes[i];
// Get the DocumentDescription object
DocumentDescription dd = documentDescriptions[((Integer)indexMap.get(docIDHash)).intValue()];
// Query for the status
ArrayList list = new ArrayList();
String query = database.buildConjunctionClause(list,new ClauseDescription[]{
new UnitaryClause(jobQueue.idField,dd.getID())});
TrackerClass.notePreread(dd.getID());
IResultSet set = database.performQuery("SELECT "+jobQueue.statusField+" FROM "+jobQueue.getTableName()+" WHERE "+
query+" FOR UPDATE",list,null,null);
TrackerClass.noteRead(dd.getID());
if (set.getRowCount() > 0)
{
IResultRow row = set.getRow(0);
// Grab the status
int status = jobQueue.stringToStatus((String)row.getValue(jobQueue.statusField));
// Update the jobqueue table
boolean didDelete = jobQueue.updateOrHopcountRemoveRecord(dd.getID(),status);
if (didDelete)
{
deleteList.add(dd.getDocumentIdentifierHash());
}
}
i++;
}
String[] docIDSimpleHashes = new String[deleteList.size()];
for (int j = 0; j < docIDSimpleHashes.length; j++)
{
docIDSimpleHashes[j] = deleteList.get(j);
}
// Next, find the documents that are affected by carrydown deletion.
DocumentDescription[] rval = calculateAffectedDeleteCarrydownChildren(jobID,docIDSimpleHashes);
// Since hopcount inheritance and prerequisites came from the addDocument() method,
// we don't delete them here.
TrackerClass.notePrecommit();
database.performCommit();
TrackerClass.noteCommit();
return rval;
}
catch (ManifoldCFException e)
{
database.signalRollback();
TrackerClass.noteRollback();
if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT)
{
if (Logging.perf.isDebugEnabled())
Logging.perf.debug("Aborted transaction marking completed "+Integer.toString(docIDHashes.length)+
" docs: "+e.getMessage());
sleepAmt = getRandomAmount();
continue;
}
throw e;
}
catch (Error e)
{
database.signalRollback();
TrackerClass.noteRollback();
throw e;
}
finally
{
database.endTransaction();
sleepFor(sleepAmt);
}
}
}
/** Mark hopcount removal from queue as a result of processing of an active document.
* The document is expected to be in one of the active states: ACTIVE, ACTIVESEEDING,
* ACTIVENEEDSRESCAN, ACTIVESEEDINGNEEDSRESCAN. The RESCAN variants are interpreted
* as meaning that the document should not be marked as removed, but should instead be popped back on the queue for
* a repeat processing attempt.
*@param documentDescription is the description object for the document that was processed.
*@param hopcountMethod describes how to handle deletions for hopcount purposes.
*@return the set of documents for which carrydown data was changed by this operation. These documents are likely
* to be requeued as a result of the change.
*/
public DocumentDescription[] markDocumentHopcountRemoval(Long jobID, String[] legalLinkTypes, DocumentDescription documentDescription,
int hopcountMethod)
throws ManifoldCFException
{
return markDocumentHopcountRemovalMultiple(jobID,legalLinkTypes,new DocumentDescription[]{documentDescription},hopcountMethod);
}
/** Delete from queue as a result of expiration of an active document.
* The document is expected to be in one of the active states: ACTIVE, ACTIVESEEDING,
* ACTIVENEEDSRESCAN, ACTIVESEEDINGNEEDSRESCAN. Since the document expired,
* no special activity takes place as a result of the document being in a RESCAN state.
*@param documentDescriptions are the set of description objects for the documents that were processed.
*@param hopcountMethod describes how to handle deletions for hopcount purposes.
*@return the set of documents for which carrydown data was changed by this operation. These documents are likely
* to be requeued as a result of the change.
*/
public DocumentDescription[] markDocumentExpiredMultiple(Long jobID, String[] legalLinkTypes, DocumentDescription[] documentDescriptions,
int hopcountMethod)
throws ManifoldCFException
{
return doDeleteMultiple(jobID,legalLinkTypes,documentDescriptions,hopcountMethod);
}
/** Delete from queue as a result of expiration of an active document.
* The document is expected to be in one of the active states: ACTIVE, ACTIVESEEDING,
* ACTIVENEEDSRESCAN, ACTIVESEEDINGNEEDSRESCAN. Since the document expired,
* no special activity takes place as a result of the document being in a RESCAN state.
*@param documentDescription is the description object for the document that was processed.
*@param hopcountMethod describes how to handle deletions for hopcount purposes.
*@return the set of documents for which carrydown data was changed by this operation. These documents are likely
* to be requeued as a result of the change.
*/
public DocumentDescription[] markDocumentExpired(Long jobID, String[] legalLinkTypes, DocumentDescription documentDescription,
int hopcountMethod)
throws ManifoldCFException
{
return markDocumentExpiredMultiple(jobID,legalLinkTypes,new DocumentDescription[]{documentDescription},hopcountMethod);
}
/** Delete from queue as a result of cleaning up an unreachable document.
* The document is expected to be in the PURGATORY state. There is never any need to reprocess the
* document.
*@param documentDescriptions are the set of description objects for the documents that were processed.
*@param hopcountMethod describes how to handle deletions for hopcount purposes.
*@return the set of documents for which carrydown data was changed by this operation. These documents are likely
* to be requeued as a result of the change.
*/
public DocumentDescription[] markDocumentCleanedUpMultiple(Long jobID, String[] legalLinkTypes, DocumentDescription[] documentDescriptions,
int hopcountMethod)
throws ManifoldCFException
{
return doDeleteMultiple(jobID,legalLinkTypes,documentDescriptions,hopcountMethod);
}
/** Delete from queue as a result of cleaning up an unreachable document.
* The document is expected to be in the PURGATORY state. There is never any need to reprocess the
* document.
*@param documentDescription is the description object for the document that was processed.
*@param hopcountMethod describes how to handle deletions for hopcount purposes.
*@return the set of documents for which carrydown data was changed by this operation. These documents are likely
* to be requeued as a result of the change.
*/
public DocumentDescription[] markDocumentCleanedUp(Long jobID, String[] legalLinkTypes, DocumentDescription documentDescription,
int hopcountMethod)
throws ManifoldCFException
{
return markDocumentCleanedUpMultiple(jobID,legalLinkTypes,new DocumentDescription[]{documentDescription},hopcountMethod);
}
/** Delete documents with no repercussions. We don't have to worry about the current state of each document,
* since the document is definitely going away.
*@param documentDescriptions are the set of description objects for the documents that were processed.
*@param hopcountMethod describes how to handle deletions for hopcount purposes.
*@return the set of documents for which carrydown data was changed by this operation. These documents are likely
* to be requeued as a result of the change.
*/
protected DocumentDescription[] doDeleteMultiple(Long jobID, String[] legalLinkTypes, DocumentDescription[] documentDescriptions,
int hopcountMethod)
throws ManifoldCFException
{
if (documentDescriptions.length == 0)
return new DocumentDescription[0];
// Order of locking is not normally important here, because documents that wind up being deleted are never being worked on by anything else.
// In all cases, the state of the document excludes other activity.
// The only tricky situation is when a thread is processing a document which happens to be getting deleted, while another thread is trying to add
// a reference for the very same document to the queue. Then, order of locking matters, and the deletions should happen in a specific order to avoid
// the possibility of deadlock. That possibility is enough of a risk that I've chosen to order the deletions by document id hash order, just like
// everywhere else.
long startTime = 0L;
if (Logging.perf.isDebugEnabled())
{
startTime = System.currentTimeMillis();
Logging.perf.debug("Waiting to delete "+Integer.toString(documentDescriptions.length)+" docs and clean up hopcount for job "+jobID.toString());
}
HashMap indexMap = new HashMap();
String[] docIDHashes = new String[documentDescriptions.length];
int i = 0;
while (i < documentDescriptions.length)
{
docIDHashes[i] = documentDescriptions[i].getDocumentIdentifierHash() + ":" + documentDescriptions[i].getJobID();
indexMap.put(docIDHashes[i],new Integer(i));
i++;
}
// Sort by doc hash, to establish non-blocking lock order
java.util.Arrays.sort(docIDHashes);
DocumentDescription[] rval;
while (true)
{
long sleepAmt = 0L;
database.beginTransaction(database.TRANSACTION_SERIALIZED);
try
{
if (Logging.perf.isDebugEnabled())
Logging.perf.debug("Waited "+new Long(System.currentTimeMillis()-startTime).toString()+" ms to start deleting "+Integer.toString(docIDHashes.length)+
" docs and clean up hopcount for job "+jobID.toString());
String[] docIDSimpleHashes = new String[docIDHashes.length];
// Delete jobqueue rows FIRST. Even though we do this before assessing the carrydown implications, it is OK because it's the CHILDREN of these
// rows that might get affected by carrydown data deletion, not the rows themselves!
i = 0;
while (i < docIDHashes.length)
{
String docIDHash = docIDHashes[i];
DocumentDescription dd = documentDescriptions[((Integer)indexMap.get(docIDHash)).intValue()];
// Individual operations are necessary so order can be controlled.
jobQueue.deleteRecord(dd.getID());
docIDSimpleHashes[i] = dd.getDocumentIdentifierHash();
i++;
}
// Next, find the documents that are affected by carrydown deletion.
rval = calculateAffectedDeleteCarrydownChildren(jobID,docIDSimpleHashes);
// Finally, delete the carrydown records in question.
carryDown.deleteRecords(jobID,docIDSimpleHashes);
if (legalLinkTypes.length > 0)
hopCount.deleteDocumentIdentifiers(jobID,legalLinkTypes,docIDSimpleHashes,hopcountMethod);
database.performCommit();
if (Logging.perf.isDebugEnabled())
Logging.perf.debug("Took "+new Long(System.currentTimeMillis()-startTime).toString()+" ms to delete "+Integer.toString(docIDHashes.length)+
" docs and clean up hopcount for job "+jobID.toString());
break;
}
catch (ManifoldCFException e)
{
database.signalRollback();
if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT)
{
if (Logging.perf.isDebugEnabled())
Logging.perf.debug("Aborted transaction deleting "+Integer.toString(docIDHashes.length)+
" docs and clean up hopcount for job "+jobID.toString()+": "+e.getMessage());
sleepAmt = getRandomAmount();
continue;
}
throw e;
}
catch (Error e)
{
database.signalRollback();
throw e;
}
finally
{
database.endTransaction();
sleepFor(sleepAmt);
}
}
return rval;
}
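// Illustrative sketch only (not part of the production path): the deterministic lock-ordering idiom used by
// doDeleteMultiple() can be demonstrated in isolation. The composite keys below are hypothetical; the point is
// that sorting "hash:jobID" keys gives every thread the same acquisition order, which prevents lock cycles.
/*
String[] threadAKeys = {"b7:101","a3:101","c9:101"};
String[] threadBKeys = {"c9:101","b7:101","a3:101"};
java.util.Arrays.sort(threadAKeys);
java.util.Arrays.sort(threadBKeys);
// Both arrays are now {"a3:101","b7:101","c9:101"}; row locks are therefore always acquired
// in the same order, so two competing threads can block each other but can never deadlock.
*/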
/** Helper method: Find the document descriptions that will be affected due to carrydown row deletions.
*/
protected DocumentDescription[] calculateAffectedDeleteCarrydownChildren(Long jobID, String[] docIDHashes)
throws ManifoldCFException
{
// Break the request into pieces, as needed, and throw everything into a hash for uniqueness.
// We are going to need to break up this query into a number of subqueries, each covering a subset of parent id hashes.
// The goal is to throw all the children into a hash, to make them unique at the end.
HashMap resultHash = new HashMap();
ArrayList list = new ArrayList();
int maxCount = maxClauseProcessDeleteHashSet();
int i = 0;
int z = 0;
while (i < docIDHashes.length)
{
if (z == maxCount)
{
processDeleteHashSet(jobID,resultHash,list);
list.clear();
z = 0;
}
list.add(docIDHashes[i]);
i++;
z++;
}
if (z > 0)
processDeleteHashSet(jobID,resultHash,list);
// Now, put together the result document list from the hash.
DocumentDescription[] rval = new DocumentDescription[resultHash.size()];
i = 0;
Iterator iter = resultHash.keySet().iterator();
while (iter.hasNext())
{
Long id = (Long)iter.next();
DocumentDescription dd = (DocumentDescription)resultHash.get(id);
rval[i++] = dd;
}
return rval;
}
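// Worked example of the batching above: with 7 document hashes and maxClauseProcessDeleteHashSet() == 3, the loop
// calls processDeleteHashSet() over batches of sizes 3, 3, and 1; the trailing "if (z > 0)" flushes the partial
// batch. Capping each batch keeps the generated conjunction clause within the database's limits.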
/** Get the maximum number of document identifier hashes that can be passed to processDeleteHashSet() in one call.
*/
protected int maxClauseProcessDeleteHashSet()
{
return database.findConjunctionClauseMax(new ClauseDescription[]{
new JoinClause("t1."+carryDown.jobIDField,"t0."+jobQueue.jobIDField),
new JoinClause("t1."+carryDown.childIDHashField,"t0."+jobQueue.docHashField)});
}
/** Helper method: look up rows affected by a deleteRecords operation.
*/
protected void processDeleteHashSet(Long jobID, HashMap resultHash, ArrayList list)
throws ManifoldCFException
{
// The query here mirrors the carrydown.restoreRecords() delete query! However, it also fetches enough information to build a DocumentDescription
// object for return, and so a join is necessary against the jobqueue table.
StringBuilder sb = new StringBuilder("SELECT ");
ArrayList newList = new ArrayList();
sb.append("t0.").append(jobQueue.idField).append(",")
.append("t0.").append(jobQueue.docHashField).append(",")
.append("t0.").append(jobQueue.docIDField)
.append(" FROM ").append(carryDown.getTableName()).append(" t1, ")
.append(jobQueue.getTableName()).append(" t0 WHERE ");
sb.append(database.buildConjunctionClause(newList,new ClauseDescription[]{
new UnitaryClause("t1."+carryDown.jobIDField,jobID),
new MultiClause("t1."+carryDown.parentIDHashField,list)})).append(" AND ");
sb.append(database.buildConjunctionClause(newList,new ClauseDescription[]{
new JoinClause("t0."+jobQueue.docHashField,"t1."+carryDown.childIDHashField),
new JoinClause("t0."+jobQueue.jobIDField,"t1."+carryDown.jobIDField)}));
/*
sb.append("t0.").append(jobQueue.idField).append(",")
.append("t0.").append(jobQueue.docHashField).append(",")
.append("t0.").append(jobQueue.docIDField)
.append(" FROM ").append(jobQueue.getTableName()).append(" t0 WHERE ")
.append(database.buildConjunctionClause(newList,new ClauseDescription[]{
new UnitaryClause("t0."+jobQueue.jobIDField,jobID)})).append(" AND ");
sb.append("EXISTS(SELECT 'x' FROM ").append(carryDown.getTableName()).append(" t1 WHERE ")
.append(database.buildConjunctionClause(newList,new ClauseDescription[]{
new JoinClause("t1."+carryDown.jobIDField,"t0."+jobQueue.jobIDField),
new MultiClause("t1."+carryDown.parentIDHashField,list),
new JoinClause("t1."+carryDown.childIDHashField,"t0."+jobQueue.docHashField)}))
.append(")");
*/
IResultSet set = database.performQuery(sb.toString(),newList,null,null);
int i = 0;
while (i < set.getRowCount())
{
IResultRow row = set.getRow(i++);
Long id = (Long)row.getValue(jobQueue.idField);
String documentIdentifierHash = (String)row.getValue(jobQueue.docHashField);
String documentIdentifier = (String)row.getValue(jobQueue.docIDField);
resultHash.put(id,new DocumentDescription(id,jobID,documentIdentifierHash,documentIdentifier));
}
}
/** Requeue a document for further processing in the future.
* This method is called after a document is processed, when the job is a "continuous" one.
* It is essentially equivalent to noting that the document processing is complete, except the
* document remains on the queue.
*@param documentDescriptions is the set of description objects for the documents that were processed.
*@param executeTimes are the times that the documents should be rescanned. Null indicates "never".
*@param actions are what should be done when the time arrives. Choices are ACTION_RESCAN or ACTION_REMOVE.
*/
public void requeueDocumentMultiple(DocumentDescription[] documentDescriptions, Long[] executeTimes,
int[] actions)
throws ManifoldCFException
{
String[] docIDHashes = new String[documentDescriptions.length];
Long[] ids = new Long[documentDescriptions.length];
Long[] executeTimesNew = new Long[documentDescriptions.length];
int[] actionsNew = new int[documentDescriptions.length];
// First loop maps document identifier back to an index.
HashMap indexMap = new HashMap();
int i = 0;
while (i < documentDescriptions.length)
{
docIDHashes[i] = documentDescriptions[i].getDocumentIdentifierHash() + ":" + documentDescriptions[i].getJobID();
indexMap.put(docIDHashes[i],new Integer(i));
i++;
}
// Sort!
java.util.Arrays.sort(docIDHashes);
// Next loop populates the actual arrays we use to feed the operation so that the ordering is correct.
i = 0;
while (i < docIDHashes.length)
{
String docIDHash = docIDHashes[i];
Integer x = (Integer)indexMap.remove(docIDHash);
if (x == null)
throw new ManifoldCFException("Assertion failure: duplicate document identifier jobid/hash detected!");
int index = x.intValue();
ids[i] = documentDescriptions[index].getID();
executeTimesNew[i] = executeTimes[index];
actionsNew[i] = actions[index];
i++;
}
while (true)
{
long sleepAmt = 0L;
database.beginTransaction();
try
{
// Going through ids in order should greatly reduce or eliminate chances of deadlock occurring. We thus need to pay attention to the sorted order.
i = 0;
while (i < ids.length)
{
jobQueue.setStatus(ids[i],jobQueue.STATUS_PENDINGPURGATORY,executeTimesNew[i],actionsNew[i],-1L,-1);
i++;
}
TrackerClass.notePrecommit();
database.performCommit();
TrackerClass.noteCommit();
break;
}
catch (Error e)
{
database.signalRollback();
TrackerClass.noteRollback();
throw e;
}
catch (ManifoldCFException e)
{
database.signalRollback();
TrackerClass.noteRollback();
if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT)
{
if (Logging.perf.isDebugEnabled())
Logging.perf.debug("Aborted transaction requeuing documents: "+e.getMessage());
sleepAmt = getRandomAmount();
continue;
}
throw e;
}
finally
{
database.endTransaction();
sleepFor(sleepAmt);
}
}
}
/** Requeue a document for further processing in the future.
* This method is called after a document is processed, when the job is a "continuous" one.
* It is essentially equivalent to noting that the document processing is complete, except the
* document remains on the queue.
*@param documentDescription is the description object for the document that was processed.
*@param executeTime is the time that the document should be rescanned. Null indicates "never".
*@param action is what should be done when the time arrives. Choices include ACTION_RESCAN or ACTION_REMOVE.
*/
public void requeueDocument(DocumentDescription documentDescription, Long executeTime, int action)
throws ManifoldCFException
{
requeueDocumentMultiple(new DocumentDescription[]{documentDescription},new Long[]{executeTime},new int[]{action});
}
/** Reset a set of documents for further processing in the future.
* This method is called after some unknown number of the documents were processed, but then a service interruption occurred.
* Note well: The logic here basically presumes that we cannot know whether the documents were indeed processed or not.
* If we knew for a fact that none of the documents had been handled, it would be possible to look at the document's
* current status and decide what the new status ought to be, based on a true rollback scenario. Such cases, however, are rare enough so that
* special logic is probably not worth it.
*@param documentDescriptions is the set of description objects for the documents that were processed.
*@param executeTime is the time that the documents should be rescanned.
*@param action is what should be done when the time arrives. Choices include ACTION_RESCAN or ACTION_REMOVE.
*@param failTime is the time beyond which a service interruption will be considered a hard failure.
*@param failCount is the number of retries beyond which a service interruption will be considered a hard failure.
*/
public void resetDocumentMultiple(DocumentDescription[] documentDescriptions, long executeTime,
int action, long failTime, int failCount)
throws ManifoldCFException
{
Long executeTimeLong = new Long(executeTime);
Long[] ids = new Long[documentDescriptions.length];
String[] docIDHashes = new String[documentDescriptions.length];
Long[] executeTimes = new Long[documentDescriptions.length];
int[] actions = new int[documentDescriptions.length];
long[] failTimes = new long[documentDescriptions.length];
int[] failCounts = new int[documentDescriptions.length];
// First loop maps document identifier back to an index.
HashMap indexMap = new HashMap();
int i = 0;
while (i < documentDescriptions.length)
{
docIDHashes[i] = documentDescriptions[i].getDocumentIdentifierHash() + ":" + documentDescriptions[i].getJobID();
indexMap.put(docIDHashes[i],new Integer(i));
i++;
}
// Sort!
java.util.Arrays.sort(docIDHashes);
// Next loop populates the actual arrays we use to feed the operation so that the ordering is correct.
i = 0;
while (i < docIDHashes.length)
{
String docIDHash = docIDHashes[i];
Integer x = (Integer)indexMap.remove(docIDHash);
if (x == null)
throw new ManifoldCFException("Assertion failure: duplicate document identifier jobid/hash detected!");
int index = x.intValue();
ids[i] = documentDescriptions[index].getID();
executeTimes[i] = executeTimeLong;
actions[i] = action;
long oldFailTime = documentDescriptions[index].getFailTime();
if (oldFailTime == -1L)
oldFailTime = failTime;
failTimes[i] = oldFailTime;
int oldFailCount = documentDescriptions[index].getFailRetryCount();
if (oldFailCount == -1)
oldFailCount = failCount;
else
{
oldFailCount--;
if (failCount != -1 && oldFailCount > failCount)
oldFailCount = failCount;
}
failCounts[i] = oldFailCount;
i++;
}
// Documents get marked PENDINGPURGATORY regardless of their current state; this is because we can't know at this point whether
// an ingestion attempt occurred or not, so we have to treat the documents as having been processed at least once.
while (true)
{
long sleepAmt = 0L;
database.beginTransaction();
try
{
// Going through ids in order should greatly reduce or eliminate chances of deadlock occurring. We thus need to pay attention to the sorted order.
i = 0;
while (i < ids.length)
{
jobQueue.setStatus(ids[i],jobQueue.STATUS_PENDINGPURGATORY,executeTimes[i],actions[i],(failTimes==null)?-1L:failTimes[i],(failCounts==null)?-1:failCounts[i]);
i++;
}
database.performCommit();
break;
}
catch (Error e)
{
database.signalRollback();
throw e;
}
catch (ManifoldCFException e)
{
database.signalRollback();
if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT)
{
if (Logging.perf.isDebugEnabled())
Logging.perf.debug("Aborted transaction resetting documents: "+e.getMessage());
sleepAmt = getRandomAmount();
continue;
}
throw e;
}
finally
{
database.endTransaction();
sleepFor(sleepAmt);
}
}
}
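// Worked example of the fail bookkeeping above, assuming a call with failTime=T and failCount=5:
// - A document failing for the first time (getFailTime()==-1, getFailRetryCount()==-1) gets failTimes[i]=T and
//   failCounts[i]=5, i.e. the full retry budget.
// - A document already failing (getFailTime()==T0, getFailRetryCount()==3) keeps its original deadline T0 and has
//   its remaining retries decremented to 2.
// - If a stale budget exceeds the current one (getFailRetryCount()==9), the decremented value 8 is clamped down to
//   failCount=5, so the budget can only shrink.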
/** Reset a set of cleaning documents for further processing in the future.
* This method is called after some unknown number of the documents were cleaned, but then an ingestion service interruption occurred.
* Note well: The logic here basically presumes that we cannot know whether the documents were indeed cleaned or not.
* If we knew for a fact that none of the documents had been handled, it would be possible to look at the document's
* current status and decide what the new status ought to be, based on a true rollback scenario. Such cases, however, are rare enough so that
* special logic is probably not worth it.
*@param documentDescriptions is the set of description objects for the documents that were cleaned.
*@param checkTime is the minimum time for the next cleaning attempt.
*/
public void resetCleaningDocumentMultiple(DocumentDescription[] documentDescriptions, long checkTime)
throws ManifoldCFException
{
Long[] ids = new Long[documentDescriptions.length];
String[] docIDHashes = new String[documentDescriptions.length];
// First loop maps document identifier back to an index.
HashMap indexMap = new HashMap();
int i = 0;
while (i < documentDescriptions.length)
{
docIDHashes[i] = documentDescriptions[i].getDocumentIdentifierHash() + ":" + documentDescriptions[i].getJobID();
indexMap.put(docIDHashes[i],new Integer(i));
i++;
}
// Sort!
java.util.Arrays.sort(docIDHashes);
// Next loop populates the actual arrays we use to feed the operation so that the ordering is correct.
i = 0;
while (i < docIDHashes.length)
{
String docIDHash = docIDHashes[i];
Integer x = (Integer)indexMap.remove(docIDHash);
if (x == null)
throw new ManifoldCFException("Assertion failure: duplicate document identifier jobid/hash detected!");
int index = x.intValue();
ids[i] = documentDescriptions[index].getID();
i++;
}
// Documents get marked PURGATORY regardless of their current state; this is because we can't know at this point what the actual prior state was.
while (true)
{
long sleepAmt = 0L;
database.beginTransaction();
try
{
// Going through ids in order should greatly reduce or eliminate chances of deadlock occurring. We thus need to pay attention to the sorted order.
i = 0;
while (i < ids.length)
{
jobQueue.setUncleaningStatus(ids[i],checkTime);
i++;
}
TrackerClass.notePrecommit();
database.performCommit();
TrackerClass.noteCommit();
break;
}
catch (ManifoldCFException e)
{
database.signalRollback();
TrackerClass.noteRollback();
if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT)
{
if (Logging.perf.isDebugEnabled())
Logging.perf.debug("Aborted transaction resetting cleaning documents: "+e.getMessage());
sleepAmt = getRandomAmount();
continue;
}
throw e;
}
catch (Error e)
{
database.signalRollback();
TrackerClass.noteRollback();
throw e;
}
finally
{
database.endTransaction();
sleepFor(sleepAmt);
}
}
}
/** Reset a cleaning document back to its former state.
* This gets done when a deleting thread sees a service interruption, etc., from the ingestion system.
*@param documentDescription is the description of the document that was cleaned.
*@param checkTime is the minimum time for the next cleaning attempt.
*/
public void resetCleaningDocument(DocumentDescription documentDescription, long checkTime)
throws ManifoldCFException
{
resetCleaningDocumentMultiple(new DocumentDescription[]{documentDescription},checkTime);
}
/** Reset a set of deleting documents for further processing in the future.
* This method is called after some unknown number of the documents were deleted, but then an ingestion service interruption occurred.
* Note well: The logic here basically presumes that we cannot know whether the documents were indeed processed or not.
* If we knew for a fact that none of the documents had been handled, it would be possible to look at the document's
* current status and decide what the new status ought to be, based on a true rollback scenario. Such cases, however, are rare enough so that
* special logic is probably not worth it.
*@param documentDescriptions is the set of description objects for the documents that were processed.
*@param checkTime is the minimum time for the next cleaning attempt.
*/
public void resetDeletingDocumentMultiple(DocumentDescription[] documentDescriptions, long checkTime)
throws ManifoldCFException
{
Long[] ids = new Long[documentDescriptions.length];
String[] docIDHashes = new String[documentDescriptions.length];
// First loop maps document identifier back to an index.
HashMap indexMap = new HashMap();
int i = 0;
while (i < documentDescriptions.length)
{
docIDHashes[i] = documentDescriptions[i].getDocumentIdentifierHash() + ":" + documentDescriptions[i].getJobID();
indexMap.put(docIDHashes[i],new Integer(i));
i++;
}
// Sort!
java.util.Arrays.sort(docIDHashes);
// Next loop populates the actual arrays we use to feed the operation so that the ordering is correct.
i = 0;
while (i < docIDHashes.length)
{
String docIDHash = docIDHashes[i];
Integer x = (Integer)indexMap.remove(docIDHash);
if (x == null)
throw new ManifoldCFException("Assertion failure: duplicate document identifier jobid/hash detected!");
int index = x.intValue();
ids[i] = documentDescriptions[index].getID();
i++;
}
// Documents get marked COMPLETED regardless of their current state; this is because we can't know at this point what the actual prior state was.
while (true)
{
long sleepAmt = 0L;
database.beginTransaction();
try
{
// Going through ids in order should greatly reduce or eliminate chances of deadlock occurring. We thus need to pay attention to the sorted order.
i = 0;
while (i < ids.length)
{
jobQueue.setUndeletingStatus(ids[i],checkTime);
i++;
}
TrackerClass.notePrecommit();
database.performCommit();
TrackerClass.noteCommit();
break;
}
catch (ManifoldCFException e)
{
database.signalRollback();
TrackerClass.noteRollback();
if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT)
{
if (Logging.perf.isDebugEnabled())
Logging.perf.debug("Aborted transaction resetting documents: "+e.getMessage());
sleepAmt = getRandomAmount();
continue;
}
throw e;
}
catch (Error e)
{
database.signalRollback();
TrackerClass.noteRollback();
throw e;
}
finally
{
database.endTransaction();
sleepFor(sleepAmt);
}
}
}
/** Reset a deleting document back to its former state.
* This gets done when a deleting thread sees a service interruption, etc., from the ingestion system.
*@param documentDescription is the description object for the document that was cleaned.
*@param checkTime is the minimum time for the next cleaning attempt.
*/
public void resetDeletingDocument(DocumentDescription documentDescription, long checkTime)
throws ManifoldCFException
{
resetDeletingDocumentMultiple(new DocumentDescription[]{documentDescription},checkTime);
}
/** Reset an active document back to its former state.
* This gets done when there's a service interruption and the document cannot be processed yet.
* Note well: This method formerly presumed that a perfect rollback was possible, and that there was zero chance of any
* processing activity occurring before it got called. That assumption appears incorrect, however, so I've opted to now
* presume that processing has perhaps occurred. Perfect rollback is thus no longer possible.
*@param documentDescription is the description object for the document that was processed.
*@param executeTime is the time that the document should be rescanned.
*@param action is what should be done when the time arrives. Choices include ACTION_RESCAN or ACTION_REMOVE.
*@param failTime is the time beyond which a service interruption will be considered a hard failure.
*@param failCount is the number of retries beyond which a service interruption will be considered a hard failure.
*/
public void resetDocument(DocumentDescription documentDescription, long executeTime, int action, long failTime,
int failCount)
throws ManifoldCFException
{
resetDocumentMultiple(new DocumentDescription[]{documentDescription},executeTime,action,failTime,failCount);
}
/** Eliminate duplicates, and sort */
protected static String[] eliminateDuplicates(String[] docIDHashes)
{
HashMap map = new HashMap();
int i = 0;
while (i < docIDHashes.length)
{
String docIDHash = docIDHashes[i++];
map.put(docIDHash,docIDHash);
}
String[] rval = new String[map.size()];
i = 0;
Iterator iter = map.keySet().iterator();
while (iter.hasNext())
{
rval[i++] = (String)iter.next();
}
java.util.Arrays.sort(rval);
return rval;
}
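// Worked example: eliminateDuplicates(new String[]{"b7","a3","b7","c9"}) returns {"a3","b7","c9"} -- duplicates
// are collapsed by the map, and the survivors come back sorted, ready for deterministic lock ordering.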
/** Build a reorder map, describing how to convert an original index into a reordered index. */
protected static HashMap buildReorderMap(String[] originalIDHashes, String[] reorderedIDHashes)
{
HashMap reorderSet = new HashMap();
int i = 0;
while (i < reorderedIDHashes.length)
{
String reorderedIDHash = reorderedIDHashes[i];
Integer position = new Integer(i);
reorderSet.put(reorderedIDHash,position);
i++;
}
HashMap map = new HashMap();
int j = 0;
while (j < originalIDHashes.length)
{
String originalIDHash = originalIDHashes[j];
Integer position = (Integer)reorderSet.get(originalIDHash);
if (position != null)
{
map.put(new Integer(j),position);
// Remove, so that only one of each duplicate will have a place in the map
reorderSet.remove(originalIDHash);
}
j++;
}
return map;
}
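// Worked example: with originalIDHashes = {"x","y","x","z"} and reorderedIDHashes = {"x","y","z"}, the returned
// map is {0->0, 1->1, 3->2}. Original index 2 is absent because its duplicate "x" already claimed reordered
// slot 0; only one representative of each duplicate gets a place in the map.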
/** Add an initial set of documents to the queue.
* This method is called during job startup, when the queue is being loaded.
* A set of document references is passed to this method, which updates the status of the document
* in the specified job's queue, according to specific state rules.
*@param jobID is the job identifier.
*@param legalLinkTypes is the set of legal link types that this connector generates.
*@param docIDHashes are the local document identifier hashes.
*@param docIDs are the local document identifiers.
*@param overrideSchedule is true if any existing document schedule should be overridden.
*@param hopcountMethod is either accurate, nodelete, or neverdelete.
*@param currentTime is the current time in milliseconds since epoch.
*@param documentPriorities are the document priorities corresponding to the document identifiers.
*@param prereqEventNames are the events that must be completed before each document can be processed.
*@return true if the priority value(s) were used, false otherwise.
*/
public boolean[] addDocumentsInitial(Long jobID, String[] legalLinkTypes,
String[] docIDHashes, String[] docIDs, boolean overrideSchedule,
int hopcountMethod, long currentTime, double[] documentPriorities,
String[][] prereqEventNames)
throws ManifoldCFException
{
if (docIDHashes.length == 0)
return new boolean[0];
// The document identifiers need to be sorted in a consistent fashion to reduce deadlock, and have duplicates removed, before going ahead.
// But, the documentPriorities and the return booleans need to correspond to the initial array. So, after we come up with
// our internal order, we need to construct a map that takes an original index and maps it to the reduced, reordered index.
String[] reorderedDocIDHashes = eliminateDuplicates(docIDHashes);
HashMap reorderMap = buildReorderMap(docIDHashes,reorderedDocIDHashes);
double[] reorderedDocumentPriorities = new double[reorderedDocIDHashes.length];
String[][] reorderedDocumentPrerequisites = new String[reorderedDocIDHashes.length][];
String[] reorderedDocumentIdentifiers = new String[reorderedDocIDHashes.length];
boolean[] rval = new boolean[docIDHashes.length];
int i = 0;
while (i < docIDHashes.length)
{
Integer newPosition = (Integer)reorderMap.get(new Integer(i));
if (newPosition != null)
{
reorderedDocumentPriorities[newPosition.intValue()] = documentPriorities[i];
if (prereqEventNames != null)
reorderedDocumentPrerequisites[newPosition.intValue()] = prereqEventNames[i];
else
reorderedDocumentPrerequisites[newPosition.intValue()] = null;
reorderedDocumentIdentifiers[newPosition.intValue()] = docIDs[i];
}
rval[i] = false;
i++;
}
long startTime = 0L;
if (Logging.perf.isDebugEnabled())
{
startTime = System.currentTimeMillis();
Logging.perf.debug("Waiting to add "+Integer.toString(reorderedDocIDHashes.length)+" initial docs and hopcounts for job "+jobID.toString());
}
// Postgres gets all screwed up if we permit multiple threads into the hopcount code, unless serialized
// transactions are used. But serialized transactions may require a retry in order
// to resolve transaction conflicts.
while (true)
{
long sleepAmt = 0L;
database.beginTransaction(database.TRANSACTION_SERIALIZED);
try
{
if (Logging.perf.isDebugEnabled())
Logging.perf.debug("Waited "+new Long(System.currentTimeMillis()-startTime).toString()+" ms to start adding "+Integer.toString(reorderedDocIDHashes.length)+
" initial docs and hopcounts for job "+jobID.toString());
// Go through document id's one at a time, in order - mainly to prevent deadlock as much as possible. Search for any existing row in jobqueue first (for update)
boolean[] reorderedRval = new boolean[reorderedDocIDHashes.length];
int z = 0;
while (z < reorderedDocIDHashes.length)
{
String docIDHash = reorderedDocIDHashes[z];
double docPriority = reorderedDocumentPriorities[z];
String docID = reorderedDocumentIdentifiers[z];
String[] docPrereqs = reorderedDocumentPrerequisites[z];
StringBuilder sb = new StringBuilder("SELECT ");
ArrayList list = new ArrayList();
sb.append(jobQueue.idField).append(",")
.append(jobQueue.statusField).append(",")
.append(jobQueue.checkTimeField)
.append(" FROM ").append(jobQueue.getTableName()).append(" WHERE ")
.append(database.buildConjunctionClause(list,new ClauseDescription[]{
new UnitaryClause(jobQueue.docHashField,docIDHash),
new UnitaryClause(jobQueue.jobIDField,jobID)}));
sb.append(" FOR UPDATE");
IResultSet set = database.performQuery(sb.toString(),list,null,null);
boolean priorityUsed;
long executeTime = overrideSchedule?0L:-1L;
if (set.getRowCount() > 0)
{
// Found a row, and it is now locked.
IResultRow row = set.getRow(0);
// Decode the row
Long rowID = (Long)row.getValue(jobQueue.idField);
int status = jobQueue.stringToStatus((String)row.getValue(jobQueue.statusField));
Long checkTimeValue = (Long)row.getValue(jobQueue.checkTimeField);
priorityUsed = jobQueue.updateExistingRecordInitial(rowID,status,checkTimeValue,executeTime,currentTime,docPriority,docPrereqs);
}
else
{
// Not found. Attempt an insert instead. This may fail due to constraints, but if this happens, the whole transaction will be retried.
jobQueue.insertNewRecordInitial(jobID,docIDHash,docID,docPriority,executeTime,currentTime,docPrereqs);
priorityUsed = true;
}
reorderedRval[z++] = priorityUsed;
}
if (Logging.perf.isDebugEnabled())
Logging.perf.debug("Took "+new Long(System.currentTimeMillis()-startTime).toString()+" ms to add "+Integer.toString(reorderedDocIDHashes.length)+
" initial docs for job "+jobID.toString());
if (legalLinkTypes.length > 0)
hopCount.recordSeedReferences(jobID,legalLinkTypes,reorderedDocIDHashes,hopcountMethod);
TrackerClass.notePrecommit();
database.performCommit();
TrackerClass.noteCommit();
if (Logging.perf.isDebugEnabled())
Logging.perf.debug("Took "+new Long(System.currentTimeMillis()-startTime).toString()+" ms to add "+Integer.toString(reorderedDocIDHashes.length)+
" initial docs and hopcounts for job "+jobID.toString());
// Rejigger to correspond with calling order
i = 0;
while (i < docIDs.length)
{
Integer finalPosition = (Integer)reorderMap.get(new Integer(i));
if (finalPosition != null)
rval[i] = reorderedRval[finalPosition.intValue()];
i++;
}
return rval;
}
catch (ManifoldCFException e)
{
database.signalRollback();
TrackerClass.noteRollback();
if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT)
{
if (Logging.perf.isDebugEnabled())
Logging.perf.debug("Aborted transaction adding "+Integer.toString(reorderedDocIDHashes.length)+
" initial docs for job "+jobID.toString()+": "+e.getMessage());
sleepAmt = getRandomAmount();
continue;
}
throw e;
}
catch (Error e)
{
database.signalRollback();
TrackerClass.noteRollback();
throw e;
}
finally
{
database.endTransaction();
sleepFor(sleepAmt);
}
}
}
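// The retry idiom wrapped around the transaction above recurs throughout this class; here is a minimal sketch of
// its shape, with the transaction body reduced to a hypothetical doWork() call:
/*
while (true)
{
long sleepAmt = 0L;
database.beginTransaction(database.TRANSACTION_SERIALIZED);
try
{
doWork(); // any sequence of queries and updates
database.performCommit();
break; // success: leave the retry loop
}
catch (ManifoldCFException e)
{
database.signalRollback();
if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT)
{
sleepAmt = getRandomAmount(); // randomized backoff before retrying
continue; // serialization conflict: try the whole transaction again
}
throw e; // anything else is fatal
}
catch (Error e)
{
database.signalRollback();
throw e;
}
finally
{
database.endTransaction();
sleepFor(sleepAmt);
}
}
*/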
/** Add an initial set of remaining documents to the queue.
* This method is called during job startup, when the queue is being loaded, to list documents that
* were NOT included by calling addDocumentsInitial(). Documents listed here are simply designed to
* enable the framework to get rid of old, invalid seeds. They are not queued for processing.
*@param jobID is the job identifier.
*@param legalLinkTypes is the set of legal link types that this connector generates.
*@param docIDHashes are the local document identifier hashes.
*@param hopcountMethod is either accurate, nodelete, or neverdelete.
*/
public void addRemainingDocumentsInitial(Long jobID, String[] legalLinkTypes, String[] docIDHashes,
int hopcountMethod)
throws ManifoldCFException
{
if (docIDHashes.length == 0)
return;
String[] reorderedDocIDHashes = eliminateDuplicates(docIDHashes);
long startTime = 0L;
if (Logging.perf.isDebugEnabled())
{
startTime = System.currentTimeMillis();
Logging.perf.debug("Waiting to add "+Integer.toString(reorderedDocIDHashes.length)+" remaining docs and hopcounts for job "+jobID.toString());
}
// Postgres gets all screwed up if we permit multiple threads into the hopcount code unless the transactions are serialized,
// because otherwise one transaction can see the effects of another transaction before it's been committed.
while (true)
{
long sleepAmt = 0L;
database.beginTransaction(database.TRANSACTION_SERIALIZED);
try
{
if (Logging.perf.isDebugEnabled())
Logging.perf.debug("Waited "+new Long(System.currentTimeMillis()-startTime).toString()+" ms to start adding "+Integer.toString(reorderedDocIDHashes.length)+
" remaining docs and hopcounts for job "+jobID.toString());
jobQueue.addRemainingDocumentsInitial(jobID,reorderedDocIDHashes);
if (legalLinkTypes.length > 0)
hopCount.recordSeedReferences(jobID,legalLinkTypes,reorderedDocIDHashes,hopcountMethod);
database.performCommit();
if (Logging.perf.isDebugEnabled())
Logging.perf.debug("Took "+new Long(System.currentTimeMillis()-startTime).toString()+" ms to add "+Integer.toString(reorderedDocIDHashes.length)+
" remaining docs and hopcounts for job "+jobID.toString());
break;
}
catch (ManifoldCFException e)
{
database.signalRollback();
if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT)
{
if (Logging.perf.isDebugEnabled())
Logging.perf.debug("Aborted transaction adding "+Integer.toString(reorderedDocIDHashes.length)+
" remaining docs and hopcounts for job "+jobID.toString()+": "+e.getMessage());
sleepAmt = getRandomAmount();
continue;
}
throw e;
}
catch (Error e)
{
database.signalRollback();
throw e;
}
finally
{
database.endTransaction();
sleepFor(sleepAmt);
}
}
}
/** Signal that a seeding pass has been done.
* Call this method at the end of a seeding pass. It is used to perform the bookkeeping necessary to
* maintain the hopcount table.
*@param jobID is the job identifier.
*@param legalLinkTypes is the set of legal link types that this connector generates.
*@param isPartial is set if the seeds provided are only a partial list. Some connectors cannot
* supply a full list of seeds on every seeding iteration; this acknowledges that limitation.
*@param hopcountMethod describes how to handle deletions for hopcount purposes.
*/
public void doneDocumentsInitial(Long jobID, String[] legalLinkTypes, boolean isPartial,
int hopcountMethod)
throws ManifoldCFException
{
long startTime = 0L;
if (Logging.perf.isDebugEnabled())
{
startTime = System.currentTimeMillis();
Logging.perf.debug("Waiting to finish initial docs and hopcounts for job "+jobID.toString());
}
// Postgres gets all screwed up if we permit multiple threads into the hopcount code unless serialized transactions are used,
// because otherwise one transaction can see the effects of another transaction before it's been committed.
while (true)
{
long sleepAmt = 0L;
database.beginTransaction(database.TRANSACTION_SERIALIZED);
try
{
if (Logging.perf.isDebugEnabled())
Logging.perf.debug("Waited "+new Long(System.currentTimeMillis()-startTime).toString()+
" ms to start finishing initial docs and hopcounts for job "+jobID.toString());
jobQueue.doneDocumentsInitial(jobID,isPartial);
if (Logging.perf.isDebugEnabled())
Logging.perf.debug("Took "+new Long(System.currentTimeMillis()-startTime).toString()+
" ms to finish initial docs for job "+jobID.toString());
if (legalLinkTypes.length > 0)
hopCount.finishSeedReferences(jobID,legalLinkTypes,hopcountMethod);
database.performCommit();
if (Logging.perf.isDebugEnabled())
Logging.perf.debug("Took "+new Long(System.currentTimeMillis()-startTime).toString()+
" ms to finish initial docs and hopcounts for job "+jobID.toString());
break;
}
catch (ManifoldCFException e)
{
database.signalRollback();
if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT)
{
if (Logging.perf.isDebugEnabled())
Logging.perf.debug("Aborted transaction finishing initial docs and hopcounts for job "+jobID.toString()+": "+e.getMessage());
sleepAmt = getRandomAmount();
continue;
}
throw e;
}
catch (Error e)
{
database.signalRollback();
throw e;
}
finally
{
database.endTransaction();
sleepFor(sleepAmt);
}
}
}
/** Get the specified hop counts, with the limit as described.
*@param jobID is the job identifier.
*@param legalLinkTypes is the set of legal link types that this connector generates.
*@param docIDHashes are the hashes for the set of documents to find the hopcount for.
*@param linkType is the kind of link to find the hopcount for.
*@param limit is the limit, beyond which a negative distance may be returned.
*@param hopcountMethod is the method for managing hopcounts that is in effect.
*@return an array of booleans corresponding to the documents requested. A true value is returned
* if the document is within the specified limit, false otherwise.
*/
public boolean[] findHopCounts(Long jobID, String[] legalLinkTypes, String[] docIDHashes, String linkType, int limit,
int hopcountMethod)
throws ManifoldCFException
{
if (docIDHashes.length == 0)
return new boolean[0];
if (legalLinkTypes.length == 0)
throw new ManifoldCFException("Nonsensical request; asking for hopcounts where none are kept");
// The idea is to delay queue processing as much as possible, because that avoids having to wait
// on locks and having to repeat our evaluations.
//
// Luckily, we can glean a lot of information from what's hanging around. Specifically, whatever value
// we find in the table is an upper bound on the true hop distance value. So, only if we have documents
// that are outside the limit does the queue need to be processed.
//
// It is therefore really helpful to write in an estimated value for any newly created record, if possible. Even if the
// estimate is possibly greater than the true value, a great deal of locking and queue processing will be
// avoided.
// The flow here is to:
// - grab the right hoplock
// - process the queue
// - if the queue is empty, get the hopcounts we wanted, otherwise release the lock and loop around
long startTime = 0L;
if (Logging.perf.isDebugEnabled())
{
startTime = System.currentTimeMillis();
Logging.perf.debug("Beginning work to get "+Integer.toString(docIDHashes.length)+" hopcounts for job "+jobID.toString());
}
// Make an answer array.
boolean[] rval = new boolean[docIDHashes.length];
// Make a hash of what we still need a definitive answer for.
HashMap badAnswers = new HashMap();
int i = 0;
while (i < rval.length)
{
String docIDHash = docIDHashes[i];
rval[i] = false;
badAnswers.put(docIDHash,new Integer(i));
i++;
}
int iterationCount = 0;
while (true)
{
// Ask only about documents we don't have a definitive answer for yet.
String[] askDocIDHashes = new String[badAnswers.size()];
i = 0;
Iterator iter = badAnswers.keySet().iterator();
while (iter.hasNext())
{
askDocIDHashes[i++] = (String)iter.next();
}
int[] distances = hopCount.findHopCounts(jobID,askDocIDHashes,linkType);
i = 0;
while (i < distances.length)
{
int distance = distances[i];
String docIDHash = askDocIDHashes[i];
if (distance != -1 && distance <= limit)
{
// Found a usable value
rval[((Integer)badAnswers.remove(docIDHash)).intValue()] = true;
}
i++;
}
if (Logging.perf.isDebugEnabled())
Logging.perf.debug("Iteration "+Integer.toString(iterationCount++)+": After initial check, "+Integer.toString(badAnswers.size())+
" hopcounts remain to be found for job "+jobID.toString()+", out of "+Integer.toString(docIDHashes.length)+
" ("+new Long(System.currentTimeMillis()-startTime).toString()+" ms)");
if (badAnswers.size() == 0)
return rval;
// It appears we need to process the queue. We need to enter the hoplock section
// to make sure only one player is updating values at a time. Then, before we exit, we get the
// remaining values.
askDocIDHashes = new String[badAnswers.size()];
i = 0;
iter = badAnswers.keySet().iterator();
while (iter.hasNext())
{
askDocIDHashes[i++] = (String)iter.next();
}
// Currently, only one thread can possibly process any of the queue at a given time. This is because the queue marks are not set to something
// other than the "in queue" value during processing. My instinct is that queue processing is likely to interfere with other queue processing,
// so I've taken the route of prohibiting more than one batch of queue processing at a time, for now.
String hopLockName = getHopLockName(jobID);
long sleepAmt = 0L;
lockManager.enterWriteLock(hopLockName);
try
{
database.beginTransaction(database.TRANSACTION_SERIALIZED);
try
{
if (Logging.perf.isDebugEnabled())
Logging.perf.debug("Processing queue for job "+jobID.toString()+" ("+new Long(System.currentTimeMillis()-startTime).toString()+" ms)");
// The internal queue processing only does 200 at a time. This is a compromise between maximum efficiency (which argues for a bigger number)
// and the fact that database writes are effectively blocked for the duration (which argues for a smaller number).
boolean definitive = hopCount.processQueue(jobID,legalLinkTypes,hopcountMethod);
// If definitive answers were not found, we leave the lock and go back to check on the status of the questions we were
// interested in. If the answers are all OK then we are done; if not, we need to process more queue, and keep doing that
// until we really ARE done.
if (!definitive)
{
// Sleep a little bit so another thread can have a whack at things
sleepAmt = 100L;
database.performCommit();
continue;
}
// Definitive answers found; continue through.
distances = hopCount.findHopCounts(jobID,askDocIDHashes,linkType);
database.performCommit();
}
catch (ManifoldCFException e)
{
database.signalRollback();
if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT)
{
if (Logging.perf.isDebugEnabled())
Logging.perf.debug("Aborted transaction processing queue for job "+jobID.toString()+": "+e.getMessage());
sleepAmt = getRandomAmount();
continue;
}
throw e;
}
catch (Error e)
{
database.signalRollback();
throw e;
}
finally
{
database.endTransaction();
}
}
finally
{
lockManager.leaveWriteLock(hopLockName);
sleepFor(sleepAmt);
}
if (Logging.perf.isDebugEnabled())
Logging.perf.debug("Definitive answers found for "+Integer.toString(docIDHashes.length)+
" hopcounts for job "+jobID.toString()+" ("+new Long(System.currentTimeMillis()-startTime).toString()+" ms)");
// All answers are guaranteed to be accurate now.
i = 0;
while (i < distances.length)
{
int distance = distances[i];
String docIDHash = askDocIDHashes[i];
if (distance != -1 && distance <= limit)
{
// Found a usable value
rval[((Integer)badAnswers.remove(docIDHash)).intValue()] = true;
}
i++;
}
return rval;
}
}
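// Example of the upper-bound reasoning above: if hopCount.findHopCounts() returns a cached distance of 3 and the
// caller's limit is 5, the answer is definitively "within limit" without touching the queue, because cached values
// can only overestimate the true distance. Only a value of -1 (unknown) or a value above the limit forces the
// hoplock/queue-processing path.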
/** Get all the current seeds.
* Returns the seed document identifiers for a job.
*@param jobID is the job identifier.
*@return the document identifiers that are currently considered to be seeds.
*/
public String[] getAllSeeds(Long jobID)
throws ManifoldCFException
{
return jobQueue.getAllSeeds(jobID);
}
/** Add documents to the queue in bulk.
* This method is called during document processing, when a set of document references are discovered.
* The document references are passed to this method, which updates the status of the document(s)
* in the specified job's queue, according to specific state rules.
*@param jobID is the job identifier.
*@param legalLinkTypes is the set of legal link types that this connector generates.
*@param docIDHashes are the local document identifier hashes.
*@param docIDs are the local document identifiers.
*@param parentIdentifierHash is the optional parent identifier hash of this document. Pass null if none.
* MUST be present in the case of carrydown information.
*@param relationshipType is the optional link type between this document and its parent. Pass null if there
* is no relationship with a parent.
*@param hopcountMethod is the desired method for managing hopcounts.
*@param dataNames are the names of the data to carry down to the child from this parent.
*@param dataValues are the values to carry down to the child from this parent, corresponding to dataNames above. If CharacterInput objects are passed in here,
* it is the caller's responsibility to clean these up.
*@param currentTime is the time in milliseconds since epoch that will be recorded for this operation.
*@param documentPriorities are the desired document priorities for the documents.
*@param prereqEventNames are the events that must be completed before a document can be queued.
*@return an array of boolean values indicating whether or not the passed-in priority value was used or not for each doc id (true if used).
*/
public boolean[] addDocuments(Long jobID, String[] legalLinkTypes,
String[] docIDHashes, String[] docIDs,
String parentIdentifierHash, String relationshipType,
int hopcountMethod, String[][] dataNames, Object[][][] dataValues,
long currentTime, double[] documentPriorities,
String[][] prereqEventNames)
throws ManifoldCFException
{
if (docIDs.length == 0)
return new boolean[0];
// Sort the id hashes and eliminate duplicates. This will help avoid deadlock conditions.
// However, we also need to keep the carrydown data in synch, so track that around as well, and merge if there are
// duplicate document identifiers.
HashMap nameMap = new HashMap();
int k = 0;
while (k < docIDHashes.length)
{
String docIDHash = docIDHashes[k];
// If there are duplicates, we need to merge them.
HashMap names = (HashMap)nameMap.get(docIDHash);
if (names == null)
{
names = new HashMap();
nameMap.put(docIDHash,names);
}
String[] nameList = dataNames[k];
Object[][] dataList = dataValues[k];
int z = 0;
while (z < nameList.length)
{
String name = nameList[z];
Object[] values = dataList[z];
HashMap valueMap = (HashMap)names.get(name);
if (valueMap == null)
{
valueMap = new HashMap();
names.put(name,valueMap);
}
int y = 0;
while (y < values.length)
{
// Calculate the value hash; that's the true key, and the one that cannot be duplicated.
String valueHash;
if (values[y] instanceof CharacterInput)
{
// It's a CharacterInput object.
valueHash = ((CharacterInput)values[y]).getHashValue();
}
else
{
// It better be a String.
valueHash = ManifoldCF.hash((String)values[y]);
}
valueMap.put(valueHash,values[y]);
y++;
}
z++;
}
k++;
}
String[] reorderedDocIDHashes = eliminateDuplicates(docIDHashes);
HashMap reorderMap = buildReorderMap(docIDHashes,reorderedDocIDHashes);
double[] reorderedDocumentPriorities = new double[reorderedDocIDHashes.length];
String[][] reorderedDocumentPrerequisites = new String[reorderedDocIDHashes.length][];
String[] reorderedDocumentIdentifiers = new String[reorderedDocIDHashes.length];
boolean[] rval = new boolean[docIDHashes.length];
int i = 0;
while (i < docIDHashes.length)
{
Integer newPosition = (Integer)reorderMap.get(new Integer(i));
if (newPosition != null)
{
reorderedDocumentPriorities[newPosition.intValue()] = documentPriorities[i];
if (prereqEventNames != null)
reorderedDocumentPrerequisites[newPosition.intValue()] = prereqEventNames[i];
else
reorderedDocumentPrerequisites[newPosition.intValue()] = null;
reorderedDocumentIdentifiers[newPosition.intValue()] = docIDs[i];
}
rval[i] = false;
i++;
}
dataNames = new String[reorderedDocIDHashes.length][];
String[][][] dataHashValues = new String[reorderedDocIDHashes.length][][];
dataValues = new Object[reorderedDocIDHashes.length][][];
k = 0;
while (k < reorderedDocIDHashes.length)
{
String docIDHash = reorderedDocIDHashes[k];
HashMap names = (HashMap)nameMap.get(docIDHash);
dataNames[k] = new String[names.size()];
dataHashValues[k] = new String[names.size()][];
dataValues[k] = new Object[names.size()][];
Iterator iter = names.keySet().iterator();
int z = 0;
while (iter.hasNext())
{
String dataName = (String)iter.next();
(dataNames[k])[z] = dataName;
HashMap values = (HashMap)names.get(dataName);
(dataHashValues[k])[z] = new String[values.size()];
(dataValues[k])[z] = new Object[values.size()];
Iterator iter2 = values.keySet().iterator();
int y = 0;
while (iter2.hasNext())
{
String dataValueHash = (String)iter2.next();
Object dataValue = values.get(dataValueHash);
((dataHashValues[k])[z])[y] = dataValueHash;
((dataValues[k])[z])[y] = dataValue;
y++;
}
z++;
}
k++;
}
long startTime = 0L;
if (Logging.perf.isDebugEnabled())
{
startTime = System.currentTimeMillis();
Logging.perf.debug("Waiting to add "+Integer.toString(reorderedDocIDHashes.length)+" docs and hopcounts for job "+jobID.toString()+" parent identifier "+parentIdentifierHash);
}
// Postgres gets all screwed up if we permit multiple threads into the hopcount code unless serialized transactions are used,
// because otherwise one transaction can see the effects of another transaction before it's been committed.
while (true)
{
long sleepAmt = 0L;
database.beginTransaction(database.TRANSACTION_SERIALIZED);
try
{
if (Logging.perf.isDebugEnabled())
Logging.perf.debug("Waited "+new Long(System.currentTimeMillis()-startTime).toString()+" ms to start adding "+Integer.toString(reorderedDocIDHashes.length)+
" docs and hopcounts for job "+jobID.toString()+" parent identifier hash "+parentIdentifierHash);
// Go through document id's one at a time, in order - mainly to prevent deadlock as much as possible. Search for any existing row in jobqueue first (for update)
HashMap existingRows = new HashMap();
for (int z = 0; z < reorderedDocIDHashes.length; z++)
{
String docIDHash = reorderedDocIDHashes[z];
StringBuilder sb = new StringBuilder("SELECT ");
ArrayList list = new ArrayList();
sb.append(jobQueue.idField).append(",")
.append(jobQueue.statusField).append(",")
.append(jobQueue.checkTimeField)
.append(" FROM ").append(jobQueue.getTableName()).append(" WHERE ")
.append(database.buildConjunctionClause(list,new ClauseDescription[]{
new UnitaryClause(jobQueue.docHashField,docIDHash),
new UnitaryClause(jobQueue.jobIDField,jobID)}));
sb.append(" FOR UPDATE");
IResultSet set = database.performQuery(sb.toString(),list,null,null);
boolean priorityUsed;
if (set.getRowCount() > 0)
{
// Found a row, and it is now locked.
IResultRow row = set.getRow(0);
// Decode the row
Long rowID = (Long)row.getValue(jobQueue.idField);
int status = jobQueue.stringToStatus((String)row.getValue(jobQueue.statusField));
Long checkTimeValue = (Long)row.getValue(jobQueue.checkTimeField);
existingRows.put(docIDHash,new JobqueueRecord(rowID,status,checkTimeValue));
}
else
{
// Not found. Attempt an insert instead. This may fail due to constraints, but if this happens, the whole transaction will be retried.
jobQueue.insertNewRecord(jobID,docIDHash,reorderedDocumentIdentifiers[z],reorderedDocumentPriorities[z],0L,currentTime,reorderedDocumentPrerequisites[z]);
}
}
// Update all the carrydown data at once, for greatest efficiency.
boolean[] carrydownChangesSeen = carryDown.recordCarrydownDataMultiple(jobID,parentIdentifierHash,reorderedDocIDHashes,dataNames,dataHashValues,dataValues);
// Same with hopcount.
boolean[] hopcountChangesSeen = null;
if (parentIdentifierHash != null && relationshipType != null)
hopcountChangesSeen = hopCount.recordReferences(jobID,legalLinkTypes,parentIdentifierHash,reorderedDocIDHashes,relationshipType,hopcountMethod);
// Loop through the document id's again, and perform updates where needed
boolean[] reorderedRval = new boolean[reorderedDocIDHashes.length];
boolean reactivateRemovedHopcountRecords = false;
for (int z = 0; z < reorderedDocIDHashes.length; z++)
{
String docIDHash = reorderedDocIDHashes[z];
JobqueueRecord jr = (JobqueueRecord)existingRows.get(docIDHash);
if (jr == null)
// It was an insert
reorderedRval[z] = true;
else
{
// It was an existing row; do the update logic
// The hopcountChangesSeen array describes whether each reference is a new one. This
// helps us determine whether we're going to need to "flip" HOPCOUNTREMOVED documents
// to the PENDING state. If the new link ended in an existing record, THEN we need to flip them all!
reorderedRval[z] = jobQueue.updateExistingRecord(jr.getRecordID(),jr.getStatus(),jr.getCheckTimeValue(),
0L,currentTime,carrydownChangesSeen[z] || (hopcountChangesSeen!=null && hopcountChangesSeen[z]),
reorderedDocumentPriorities[z],reorderedDocumentPrerequisites[z]);
// Signal if we need to perform the flip
if (hopcountChangesSeen != null && hopcountChangesSeen[z])
reactivateRemovedHopcountRecords = true;
}
}
if (reactivateRemovedHopcountRecords)
jobQueue.reactivateHopcountRemovedRecords(jobID);
TrackerClass.notePrecommit();
database.performCommit();
TrackerClass.noteCommit();
if (Logging.perf.isDebugEnabled())
Logging.perf.debug("Took "+new Long(System.currentTimeMillis()-startTime).toString()+" ms to add "+Integer.toString(reorderedDocIDHashes.length)+
" docs and hopcounts for job "+jobID.toString()+" parent identifier hash "+parentIdentifierHash);
i = 0;
while (i < docIDHashes.length)
{
Integer finalPosition = (Integer)reorderMap.get(new Integer(i));
if (finalPosition != null)
rval[i] = reorderedRval[finalPosition.intValue()];
i++;
}
return rval;
}
catch (ManifoldCFException e)
{
database.signalRollback();
TrackerClass.noteRollback();
if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT)
{
sleepAmt = getRandomAmount();
if (Logging.perf.isDebugEnabled())
Logging.perf.debug("Aborted transaction adding "+Integer.toString(reorderedDocIDHashes.length)+
" docs and hopcounts for job "+jobID.toString()+" parent identifier hash "+parentIdentifierHash+": "+e.getMessage()+"; sleeping for "+new Long(sleepAmt).toString()+" ms",e);
continue;
}
throw e;
}
catch (RuntimeException e)
{
database.signalRollback();
TrackerClass.noteRollback();
throw e;
}
catch (Error e)
{
database.signalRollback();
TrackerClass.noteRollback();
throw e;
}
finally
{
database.endTransaction();
sleepFor(sleepAmt);
}
}
}
/** Add a document to the queue.
* This method is called during document processing, when a document reference is discovered.
* The document reference is passed to this method, which updates the status of the document
* in the specified job's queue, according to specific state rules.
*@param jobID is the job identifier.
*@param legalLinkTypes is the set of legal link types that this connector generates.
*@param docIDHash is the local document identifier hash value.
*@param docID is the local document identifier.
*@param parentIdentifierHash is the optional parent identifier hash of this document. Pass null if none.
* MUST be present in the case of carrydown information.
*@param relationshipType is the optional link type between this document and its parent. Pass null if there
* is no relationship with a parent.
*@param hopcountMethod is the desired method for managing hopcounts.
*@param dataNames are the names of the data to carry down to the child from this parent.
*@param dataValues are the values to carry down to the child from this parent, corresponding to dataNames above.
*@param currentTime is the time in milliseconds since epoch that will be recorded for this operation.
*@param priority is the desired document priority for the document.
*@param prereqEventNames are the events that must be completed before the document can be processed.
*@return true if the priority value was used, false otherwise.
*/
public boolean addDocument(Long jobID, String[] legalLinkTypes, String docIDHash, String docID,
String parentIdentifierHash, String relationshipType,
int hopcountMethod, String[] dataNames, Object[][] dataValues,
long currentTime, double priority, String[] prereqEventNames)
throws ManifoldCFException
{
return addDocuments(jobID,legalLinkTypes,
new String[]{docIDHash},new String[]{docID},
parentIdentifierHash,relationshipType,hopcountMethod,new String[][]{dataNames},
new Object[][][]{dataValues},currentTime,new double[]{priority},new String[][]{prereqEventNames})[0];
}
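// A minimal usage sketch for the single-document convenience wrapper above (all identifier values hypothetical):
/*
boolean priorityUsed = jobManager.addDocument(jobID,legalLinkTypes,
childDocHash,childDocID,
parentDocHash,"link", // relationship to the discovering parent
hopcountMethod,
new String[]{"attribute"}, // carrydown data names
new Object[][]{new Object[]{"value"}}, // carrydown data values
System.currentTimeMillis(),1.0,null); // no prerequisite events
*/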
/** Complete adding child documents to the queue, for a set of documents.
* This method is called at the end of document processing, to help the hopcount tracking engine do its bookkeeping.
*@param jobID is the job identifier.
*@param legalLinkTypes is the set of legal link types that this connector generates.
*@param parentIdentifierHashes are the document identifier hashes for whom child link extraction just took place.
*@param hopcountMethod describes how to handle deletions for hopcount purposes.
*@return the set of documents for which carrydown data was changed by this operation. These documents are likely
* to be requeued as a result of the change.
*/
public DocumentDescription[] finishDocuments(Long jobID, String[] legalLinkTypes, String[] parentIdentifierHashes, int hopcountMethod)
throws ManifoldCFException
{
if (parentIdentifierHashes.length == 0)
return new DocumentDescription[0];
DocumentDescription[] rval;
if (legalLinkTypes.length == 0)
{
// Must at least end the carrydown transaction. By itself, this does not need a serialized transaction; however, occasional
// deadlock is possible when a document shares multiple parents, so do the whole retry drill
while (true)
{
long sleepAmt = 0L;
database.beginTransaction(database.TRANSACTION_SERIALIZED);
try
{
// A certain set of carrydown records are going to be deleted by the ensuing restoreRecords command. Calculate that set of records!
rval = calculateAffectedRestoreCarrydownChildren(jobID,parentIdentifierHashes);
carryDown.restoreRecords(jobID,parentIdentifierHashes);
database.performCommit();
break;
}
catch (ManifoldCFException e)
{
database.signalRollback();
if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT)
{
if (Logging.perf.isDebugEnabled())
Logging.perf.debug("Aborted transaction finishing "+
Integer.toString(parentIdentifierHashes.length)+" doc carrydown records for job "+jobID.toString()+": "+e.getMessage());
sleepAmt = getRandomAmount();
continue;
}
throw e;
}
catch (Error e)
{
database.signalRollback();
throw e;
}
finally
{
database.endTransaction();
sleepFor(sleepAmt);
}
}
}
else
{
long startTime = 0L;
if (Logging.perf.isDebugEnabled())
{
startTime = System.currentTimeMillis();
Logging.perf.debug("Waiting to finish "+Integer.toString(parentIdentifierHashes.length)+" doc hopcounts for job "+jobID.toString());
}
// Postgres gets all screwed up if we permit multiple threads into the hopcount code unless serialized transactions are used,
// because otherwise one transaction can see the effects of another transaction before it's been committed.
while (true)
{
long sleepAmt = 0L;
database.beginTransaction(database.TRANSACTION_SERIALIZED);
try
{
// A certain set of carrydown records are going to be deleted by the ensuing restoreRecords command. Calculate that set of records!
rval = calculateAffectedRestoreCarrydownChildren(jobID,parentIdentifierHashes);
carryDown.restoreRecords(jobID,parentIdentifierHashes);
if (Logging.perf.isDebugEnabled())
Logging.perf.debug("Waited "+new Long(System.currentTimeMillis()-startTime).toString()+" ms to start finishing "+
Integer.toString(parentIdentifierHashes.length)+" doc hopcounts for job "+jobID.toString());
hopCount.finishParents(jobID,legalLinkTypes,parentIdentifierHashes,hopcountMethod);
database.performCommit();
if (Logging.perf.isDebugEnabled())
Logging.perf.debug("Took "+new Long(System.currentTimeMillis()-startTime).toString()+" ms to finish "+
Integer.toString(parentIdentifierHashes.length)+" doc hopcounts for job "+jobID.toString());
break;
}
catch (ManifoldCFException e)
{
database.signalRollback();
if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT)
{
if (Logging.perf.isDebugEnabled())
Logging.perf.debug("Aborted transaction finishing "+
Integer.toString(parentIdentifierHashes.length)+" doc hopcounts for job "+jobID.toString()+": "+e.getMessage());
sleepAmt = getRandomAmount();
continue;
}
throw e;
}
catch (Error e)
{
database.signalRollback();
throw e;
}
finally
{
database.endTransaction();
sleepFor(sleepAmt);
}
}
}
return rval;
}
/** Helper method: Calculate the unique set of affected carrydown children resulting from a "restoreRecords" operation.
*/
protected DocumentDescription[] calculateAffectedRestoreCarrydownChildren(Long jobID, String[] parentIDHashes)
throws ManifoldCFException
{
// We are going to need to break up this query into a number of subqueries, each covering a subset of parent id hashes.
// The goal is to throw all the children into a hash, to make them unique at the end.
HashMap resultHash = new HashMap();
ArrayList list = new ArrayList();
int maxCount = database.getMaxOrClause();
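    // Illustrative sizing (numbers hypothetical): if getMaxOrClause() returned 25 and
    // there were 60 parent hashes, the loop below would issue three subqueries covering
    // 25, 25, and 10 hashes respectively.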
int i = 0;
int z = 0;
while (i < parentIDHashes.length)
{
if (z == maxCount)
{
processParentHashSet(jobID,resultHash,list);
list.clear();
z = 0;
}
list.add(parentIDHashes[i]);
i++;
z++;
}
if (z > 0)
processParentHashSet(jobID,resultHash,list);
// Now, put together the result document list from the hash.
DocumentDescription[] rval = new DocumentDescription[resultHash.size()];
i = 0;
Iterator iter = resultHash.keySet().iterator();
while (iter.hasNext())
{
Long id = (Long)iter.next();
DocumentDescription dd = (DocumentDescription)resultHash.get(id);
rval[i++] = dd;
}
return rval;
}
/** Helper method: look up rows affected by a restoreRecords operation.
*/
protected void processParentHashSet(Long jobID, HashMap resultHash, ArrayList list)
throws ManifoldCFException
{
// The query here mirrors the carrydown.restoreRecords() delete query! However, it also fetches enough information to build a DocumentDescription
// object for return, and so a join is necessary against the jobqueue table.
StringBuilder sb = new StringBuilder("SELECT ");
ArrayList newlist = new ArrayList();
sb.append("t0.").append(jobQueue.idField).append(",")
.append("t0.").append(jobQueue.docHashField).append(",")
.append("t0.").append(jobQueue.docIDField)
.append(" FROM ").append(carryDown.getTableName()).append(" t1, ")
.append(jobQueue.getTableName()).append(" t0 WHERE ");
sb.append(database.buildConjunctionClause(newlist,new ClauseDescription[]{
new UnitaryClause("t1."+carryDown.jobIDField,jobID),
new MultiClause("t1."+carryDown.parentIDHashField,list)})).append(" AND ");
sb.append(database.buildConjunctionClause(newlist,new ClauseDescription[]{
new JoinClause("t0."+jobQueue.docHashField,"t1."+carryDown.childIDHashField),
new JoinClause("t0."+jobQueue.jobIDField,"t1."+carryDown.jobIDField)})).append(" AND ");
sb.append("t1.").append(carryDown.newField).append("=?");
newlist.add(carryDown.statusToString(carryDown.ISNEW_BASE));
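    // For orientation, the statement assembled above has roughly this shape (the column
    // names here are illustrative; the real ones come from the jobQueue/carryDown field
    // constants):
    //
    //   SELECT t0.id, t0.dochash, t0.docid
    //   FROM carrydown t1, jobqueue t0
    //   WHERE t1.jobid = ? AND t1.parentidhash IN (...)
    //     AND t0.dochash = t1.childidhash AND t0.jobid = t1.jobid
    //     AND t1.isnew = ?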
/*
sb.append("t0.").append(jobQueue.idField).append(",")
.append("t0.").append(jobQueue.docHashField).append(",")
.append("t0.").append(jobQueue.docIDField)
.append(" FROM ").append(jobQueue.getTableName()).append(" t0 WHERE ")
.append(database.buildConjunctionClause(newlist,new ClauseDescription[]{
new UnitaryClause("t0."+jobQueue.jobIDField,jobID)})).append(" AND ");
sb.append("EXISTS(SELECT 'x' FROM ").append(carryDown.getTableName()).append(" t1 WHERE ")
.append(database.buildConjunctionClause(newlist,new ClauseDescription[]{
new JoinClause("t1."+carryDown.jobIDField,"t0."+jobQueue.jobIDField),
new MultiClause("t1."+carryDown.parentIDHashField,list),
new JoinClause("t1."+carryDown.childIDHashField,"t0."+jobQueue.docHashField)})).append(" AND ")
.append("t1.").append(carryDown.newField).append("=?")
.append(")");
newlist.add(carryDown.statusToString(carryDown.ISNEW_BASE));
*/
IResultSet set = database.performQuery(sb.toString(),newlist,null,null);
int i = 0;
while (i < set.getRowCount())
{
IResultRow row = set.getRow(i++);
Long id = (Long)row.getValue(jobQueue.idField);
String documentIdentifierHash = (String)row.getValue(jobQueue.docHashField);
String documentIdentifier = (String)row.getValue(jobQueue.docIDField);
resultHash.put(id,new DocumentDescription(id,jobID,documentIdentifierHash,documentIdentifier));
}
}
/** Begin an event sequence.
*@param eventName is the name of the event.
*@return true if the event could be created, or false if it's already there.
*/
public boolean beginEventSequence(String eventName)
throws ManifoldCFException
{
try
{
eventManager.createEvent(eventName);
return true;
}
catch (ManifoldCFException e)
{
if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT)
return false;
throw e;
}
}
/** Complete an event sequence.
*@param eventName is the name of the event.
*/
public void completeEventSequence(String eventName)
throws ManifoldCFException
{
eventManager.destroyEvent(eventName);
}
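  // Illustrative usage of the event-sequence pair above (hypothetical caller code, not
  // part of this class):
  //
  //   if (jobManager.beginEventSequence("my-one-time-task"))
  //   {
  //     try
  //     {
  //       // ... perform the work that must happen at most once ...
  //     }
  //     finally
  //     {
  //       jobManager.completeEventSequence("my-one-time-task");
  //     }
  //   }
  //
  // The event name acts as a cross-process guard: a second caller finds the event row
  // already present, and the resulting DATABASE_TRANSACTION_ABORT is translated into a
  // 'false' return by beginEventSequence().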
/** Requeue a document set because of carrydown changes.
* This method is called when carrydown data is modified for a set of documents. The documents must be requeued for immediate reprocessing, even to the
* extent that if one is *already* being processed, it will need to be done over again.
  *@param documentDescriptions is the set of description objects for the documents that have had their parent carrydown information changed.
  *@param currentTime is the current time in milliseconds since epoch.
  *@param docPriorities are the document priorities to assign to the documents, if needed.
*@return a flag for each document priority, true if it was used, false otherwise.
*/
public boolean[] carrydownChangeDocumentMultiple(DocumentDescription[] documentDescriptions, long currentTime, double[] docPriorities)
throws ManifoldCFException
{
if (documentDescriptions.length == 0)
return new boolean[0];
// Order the updates by document hash, to prevent deadlock as much as possible.
// This map contains the original index of the document id hash.
HashMap docHashMap = new HashMap();
String[] docIDHashes = new String[documentDescriptions.length];
int i = 0;
while (i < documentDescriptions.length)
{
      docIDHashes[i] = documentDescriptions[i].getDocumentIdentifierHash() + ":" + documentDescriptions[i].getJobID();
docHashMap.put(docIDHashes[i],new Integer(i));
i++;
}
// Sort the hashes
java.util.Arrays.sort(docIDHashes);
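    // Sorting the composite "hash:jobID" keys gives every concurrent transaction the same
    // lock-acquisition order over the jobqueue rows, which is what actually prevents two
    // threads from deadlocking on overlapping document sets.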
boolean[] rval = new boolean[docIDHashes.length];
// Enter transaction and prepare to look up document states in dochash order
while (true)
{
long sleepAmt = 0L;
database.beginTransaction(database.TRANSACTION_SERIALIZED);
try
{
// This is the map that will contain the rows we found, keyed by docIDHash.
HashMap existingRows = new HashMap();
// Loop through hashes in order
int j = 0;
while (j < docIDHashes.length)
{
String docIDHash = docIDHashes[j];
// Get the index
int originalIndex = ((Integer)docHashMap.get(docIDHash)).intValue();
// Lookup document description
DocumentDescription dd = documentDescriptions[originalIndex];
// Do the query. We can base this on the id column since we have that.
StringBuilder sb = new StringBuilder("SELECT ");
ArrayList list = new ArrayList();
sb.append(jobQueue.idField).append(",")
.append(jobQueue.statusField).append(",")
.append(jobQueue.checkTimeField)
.append(" FROM ").append(jobQueue.getTableName()).append(" WHERE ")
.append(database.buildConjunctionClause(list,new ClauseDescription[]{
new UnitaryClause(jobQueue.idField,dd.getID())})).append(" FOR UPDATE");
IResultSet set = database.performQuery(sb.toString(),list,null,null);
// If the row is there, we use its current info to requeue it properly.
if (set.getRowCount() > 0)
{
// Found a row, and it is now locked.
IResultRow row = set.getRow(0);
// Decode the row
Long rowID = (Long)row.getValue(jobQueue.idField);
int status = jobQueue.stringToStatus((String)row.getValue(jobQueue.statusField));
Long checkTimeValue = (Long)row.getValue(jobQueue.checkTimeField);
existingRows.put(docIDHash,new JobqueueRecord(rowID,status,checkTimeValue));
}
j++;
}
        // Ok, existingRows contains all the rows we want to try to update. Go through these and update.
        // Reset j; the lookup loop above left it at docIDHashes.length, so without this the
        // update pass would never run.
        j = 0;
        while (j < docIDHashes.length)
{
String docIDHash = docIDHashes[j];
int originalIndex = ((Integer)docHashMap.get(docIDHash)).intValue();
JobqueueRecord jr = (JobqueueRecord)existingRows.get(docIDHash);
if (jr == null)
// It wasn't found, so the doc priority wasn't used.
rval[originalIndex] = false;
else
// It was an existing row; do the update logic; use the 'carrydown changes' flag = true all the time.
rval[originalIndex] = jobQueue.updateExistingRecord(jr.getRecordID(),jr.getStatus(),jr.getCheckTimeValue(),
0L,currentTime,true,docPriorities[originalIndex],null);
j++;
}
database.performCommit();
break;
}
catch (ManifoldCFException e)
{
database.signalRollback();
if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT)
{
if (Logging.perf.isDebugEnabled())
Logging.perf.debug("Aborted transaction handling "+Integer.toString(docIDHashes.length)+" carrydown changes: "+e.getMessage());
sleepAmt = getRandomAmount();
continue;
}
throw e;
}
catch (Error e)
{
database.signalRollback();
throw e;
}
finally
{
database.endTransaction();
sleepFor(sleepAmt);
}
}
return rval;
}
/** Requeue a document because of carrydown changes.
* This method is called when carrydown data is modified for a document. The document must be requeued for immediate reprocessing, even to the
* extent that if it is *already* being processed, it will need to be done over again.
  *@param documentDescription is the description object for the document that has had its parent carrydown information changed.
  *@param currentTime is the current time in milliseconds since epoch.
  *@param docPriority is the document priority to assign to the document, if needed.
*@return a flag for the document priority, true if it was used, false otherwise.
*/
public boolean carrydownChangeDocument(DocumentDescription documentDescription, long currentTime, double docPriority)
throws ManifoldCFException
{
return carrydownChangeDocumentMultiple(new DocumentDescription[]{documentDescription},currentTime,new double[]{docPriority})[0];
}
/** Sleep a random amount of time after a transaction abort.
*/
protected long getRandomAmount()
{
return database.getSleepAmt();
}
protected void sleepFor(long amt)
throws ManifoldCFException
{
database.sleepFor(amt);
}
/** Retrieve specific parent data for a given document.
*@param jobID is the job identifier.
*@param docIDHash is the document identifier hash value.
*@param dataName is the kind of data to retrieve.
*@return the unique data values.
*/
public String[] retrieveParentData(Long jobID, String docIDHash, String dataName)
throws ManifoldCFException
{
return carryDown.getDataValues(jobID,docIDHash,dataName);
}
/** Retrieve specific parent data for a given document.
*@param jobID is the job identifier.
*@param docIDHash is the document identifier hash value.
*@param dataName is the kind of data to retrieve.
*@return the unique data values.
*/
public CharacterInput[] retrieveParentDataAsFiles(Long jobID, String docIDHash, String dataName)
throws ManifoldCFException
{
return carryDown.getDataValuesAsFiles(jobID,docIDHash,dataName);
}
// These methods support the job threads (which start jobs and end jobs)
// There is one thread that starts jobs. It simply looks for jobs which are ready to
// start, and changes their state accordingly.
// There is also a pool of threads that end jobs. These threads wait for a job that
// looks like it is done, and do completion processing if it is.
/** Start all jobs in need of starting.
* This method marks all the appropriate jobs as "in progress", which is all that should be
* needed to start them.
  * It's also the case that the start event should be logged in the event log. In order to make it possible for
  * the caller to do this logging, the set of job IDs that were resumed is handed back through the
  * unwaitList parameter.
*@param currentTime is the current time in milliseconds since epoch.
*@param unwaitList is filled in with the set of job ID objects that were resumed.
*/
public void startJobs(long currentTime, ArrayList unwaitList)
throws ManifoldCFException
{
// This method should compare the lasttime field against the current time, for all
// "not active" jobs, and see if a job should be started.
//
// If a job is to be started, then the following occurs:
// (1) If the job is "full scan", then all COMPLETED jobqueue entries are converted to
// PURGATORY.
// (2) The job is labeled as "ACTIVE".
// (3) The starttime field is set.
// (4) The endtime field is nulled out.
//
// This method also assesses jobs that are ACTIVE or PAUSED to see if they should be
// converted to ACTIVEWAIT or PAUSEDWAIT. This would happen if the current time exceeded
// the value in the "windowend" field for the job.
//
// Finally, jobs in ACTIVEWAIT or PAUSEDWAIT are assessed to see if they should become
// ACTIVE or PAUSED. This will occur if we have entered a new window for the job.
// Note well: We can't combine locks across both our lock manager and the database unless we do it consistently. The
// consistent practice throughout CF is to do the external locks first, then the database locks. This particular method
// thus cannot use cached job description information, because it must throw database locks first against the jobs table.
while (true)
{
long sleepAmt = 0L;
database.beginTransaction();
try
{
// First, query the appropriate fields of all jobs.
StringBuilder sb = new StringBuilder("SELECT ");
ArrayList list = new ArrayList();
sb.append(jobs.idField).append(",")
.append(jobs.lastTimeField).append(",")
.append(jobs.statusField).append(",")
.append(jobs.startMethodField).append(",")
.append(jobs.outputNameField).append(",")
.append(jobs.connectionNameField)
.append(" FROM ").append(jobs.getTableName()).append(" WHERE ")
.append(database.buildConjunctionClause(list,new ClauseDescription[]{
new MultiClause(jobs.statusField,new Object[]{
jobs.statusToString(jobs.STATUS_INACTIVE),
jobs.statusToString(jobs.STATUS_ACTIVEWAIT),
jobs.statusToString(jobs.STATUS_ACTIVEWAITSEEDING),
jobs.statusToString(jobs.STATUS_PAUSEDWAIT),
jobs.statusToString(jobs.STATUS_PAUSEDWAITSEEDING)})})).append(" AND ")
.append(jobs.startMethodField).append("!=? FOR UPDATE");
list.add(jobs.startMethodToString(IJobDescription.START_DISABLE));
IResultSet set = database.performQuery(sb.toString(),list,null,null);
// Next, we query for the schedule information. In order to do that, we amass a list of job identifiers that we want schedule info
// for.
Long[] jobIDSet = new Long[set.getRowCount()];
int i = 0;
while (i < set.getRowCount())
{
IResultRow row = set.getRow(i);
jobIDSet[i++] = (Long)row.getValue(jobs.idField);
}
ScheduleRecord[][] srSet = jobs.readScheduleRecords(jobIDSet);
i = 0;
while (i < set.getRowCount())
{
IResultRow row = set.getRow(i);
Long jobID = (Long)row.getValue(jobs.idField);
int startMethod = jobs.stringToStartMethod((String)row.getValue(jobs.startMethodField));
String outputName = (String)row.getValue(jobs.outputNameField);
String connectionName = (String)row.getValue(jobs.connectionNameField);
ScheduleRecord[] thisSchedule = srSet[i++];
// Run at specific times
// We need to start with the start time as given, plus one
long startInterval = ((Long)row.getValue(jobs.lastTimeField)).longValue() + 1;
if (Logging.jobs.isDebugEnabled())
Logging.jobs.debug("Checking if job "+jobID.toString()+" needs to be started; it was last checked at "+
new Long(startInterval).toString()+", and now it is "+new Long(currentTime).toString());
// Proceed to the current time, and find a match if there is one to be found.
// If not -> continue
          // We go through *all* the schedule records. Of those that match, the one with the
          // latest end time is the one we take.
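          // For example (illustrative): if one record matches at 01:00 with a two-hour
          // duration and another matches at 02:00 with a three-hour duration, the 02:00
          // record wins, because its window ends later (05:00 vs. 03:00).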
Long matchTime = null;
Long duration = null;
boolean requestMinimum = false;
for (int l = 0; l < thisSchedule.length; l++)
{
long trialStartInterval = startInterval;
ScheduleRecord sr = thisSchedule[l];
Long thisDuration = sr.getDuration();
if (startMethod == IJobDescription.START_WINDOWINSIDE &&
thisDuration != null)
{
// Bump the start interval back before the beginning of the current interval.
// This will guarantee a start as long as there is time in the window.
long trialStart = currentTime - thisDuration.longValue();
if (trialStart < trialStartInterval)
trialStartInterval = trialStart;
}
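            // Illustrative effect: with a two-hour duration and currentTime at 03:00, the
            // trial start is pulled back to 01:00, so a window that opened at 02:00 (and is
            // therefore still in progress) can still produce a match.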
Long thisMatchTime = checkTimeMatch(trialStartInterval,currentTime,
sr.getDayOfWeek(),
sr.getDayOfMonth(),
sr.getMonthOfYear(),
sr.getYear(),
sr.getHourOfDay(),
sr.getMinutesOfHour(),
sr.getTimezone(),
thisDuration);
if (thisMatchTime == null)
{
if (Logging.jobs.isDebugEnabled())
Logging.jobs.debug(" No time match found within interval "+new Long(trialStartInterval).toString()+
" to "+new Long(currentTime).toString());
continue;
}
if (Logging.jobs.isDebugEnabled())
Logging.jobs.debug(" Time match FOUND within interval "+new Long(trialStartInterval).toString()+
" to "+new Long(currentTime).toString());
if (matchTime == null || thisDuration == null ||
(duration != null && thisMatchTime.longValue() + thisDuration.longValue() >
matchTime.longValue() + duration.longValue()))
{
matchTime = thisMatchTime;
duration = thisDuration;
requestMinimum = sr.getRequestMinimum();
}
}
if (matchTime == null)
{
jobs.updateLastTime(jobID,currentTime);
continue;
}
int status = jobs.stringToStatus(row.getValue(jobs.statusField).toString());
// Calculate the end of the window
Long windowEnd = null;
if (duration != null)
{
windowEnd = new Long(matchTime.longValue()+duration.longValue());
}
if (Logging.jobs.isDebugEnabled())
{
Logging.jobs.debug("Job '"+jobID+"' is within run window at "+new Long(currentTime).toString()+" ms. (which starts at "+
matchTime.toString()+" ms."+((duration==null)?"":(" and goes for "+duration.toString()+" ms."))+")");
}
int newJobState;
switch (status)
{
case Jobs.STATUS_INACTIVE:
// If job was formerly "inactive", do the full startup.
// Start this job! but with no end time.
// This does not get logged because the startup thread does the logging.
jobs.startJob(jobID,windowEnd,requestMinimum);
jobQueue.clearFailTimes(jobID);
if (Logging.jobs.isDebugEnabled())
{
Logging.jobs.debug("Signalled for job start for job "+jobID);
}
break;
case Jobs.STATUS_ACTIVEWAIT:
unwaitList.add(jobID);
jobs.unwaitJob(jobID,Jobs.STATUS_RESUMING,windowEnd);
jobQueue.clearFailTimes(jobID);
if (Logging.jobs.isDebugEnabled())
{
Logging.jobs.debug("Un-waited job "+jobID);
}
break;
case Jobs.STATUS_ACTIVEWAITSEEDING:
unwaitList.add(jobID);
jobs.unwaitJob(jobID,Jobs.STATUS_RESUMINGSEEDING,windowEnd);
jobQueue.clearFailTimes(jobID);
if (Logging.jobs.isDebugEnabled())
{
Logging.jobs.debug("Un-waited job "+jobID);
}
break;
case Jobs.STATUS_PAUSEDWAIT:
unwaitList.add(jobID);
jobs.unwaitJob(jobID,jobs.STATUS_PAUSED,windowEnd);
if (Logging.jobs.isDebugEnabled())
{
Logging.jobs.debug("Un-waited (but still paused) job "+jobID);
}
break;
case Jobs.STATUS_PAUSEDWAITSEEDING:
unwaitList.add(jobID);
jobs.unwaitJob(jobID,jobs.STATUS_PAUSEDSEEDING,windowEnd);
if (Logging.jobs.isDebugEnabled())
{
Logging.jobs.debug("Un-waited (but still paused) job "+jobID);
}
break;
case Jobs.STATUS_PAUSINGWAITING:
unwaitList.add(jobID);
jobs.unwaitJob(jobID,jobs.STATUS_PAUSING,windowEnd);
if (Logging.jobs.isDebugEnabled())
{
Logging.jobs.debug("Un-waited (but still paused) job "+jobID);
}
break;
case Jobs.STATUS_PAUSINGWAITINGSEEDING:
unwaitList.add(jobID);
jobs.unwaitJob(jobID,jobs.STATUS_PAUSINGSEEDING,windowEnd);
if (Logging.jobs.isDebugEnabled())
{
Logging.jobs.debug("Un-waited (but still paused) job "+jobID);
}
break;
default:
break;
}
}
database.performCommit();
return;
}
catch (ManifoldCFException e)
{
database.signalRollback();
if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT)
{
if (Logging.perf.isDebugEnabled())
Logging.perf.debug("Aborted transaction resetting for restart: "+e.getMessage());
sleepAmt = getRandomAmount();
continue;
}
throw e;
}
catch (Error e)
{
database.signalRollback();
throw e;
}
finally
{
database.endTransaction();
sleepFor(sleepAmt);
}
}
}
/** Put active or paused jobs in wait state, if they've exceeded their window.
*@param currentTime is the current time in milliseconds since epoch.
*@param waitList is filled in with the set of job ID's that were put into a wait state.
*/
public void waitJobs(long currentTime, ArrayList waitList)
throws ManifoldCFException
{
// This method assesses jobs that are ACTIVE or PAUSED to see if they should be
// converted to ACTIVEWAIT or PAUSEDWAIT. This would happen if the current time exceeded
// the value in the "windowend" field for the job.
//
database.beginTransaction();
try
{
// First, query the appropriate fields of all jobs.
StringBuilder sb = new StringBuilder("SELECT ");
ArrayList list = new ArrayList();
sb.append(jobs.idField).append(",")
.append(jobs.statusField)
.append(" FROM ").append(jobs.getTableName()).append(" WHERE ")
.append(database.buildConjunctionClause(list,new ClauseDescription[]{
new MultiClause(jobs.statusField,new Object[]{
jobs.statusToString(jobs.STATUS_ACTIVE),
jobs.statusToString(jobs.STATUS_ACTIVESEEDING),
jobs.statusToString(jobs.STATUS_ACTIVE_UNINSTALLED),
jobs.statusToString(jobs.STATUS_ACTIVESEEDING_UNINSTALLED),
jobs.statusToString(jobs.STATUS_ACTIVE_NOOUTPUT),
jobs.statusToString(jobs.STATUS_ACTIVESEEDING_NOOUTPUT),
jobs.statusToString(jobs.STATUS_ACTIVE_NEITHER),
jobs.statusToString(jobs.STATUS_ACTIVESEEDING_NEITHER),
jobs.statusToString(jobs.STATUS_PAUSED),
jobs.statusToString(jobs.STATUS_PAUSEDSEEDING)})})).append(" AND ")
.append(jobs.windowEndField).append("<? FOR UPDATE");
list.add(new Long(currentTime));
IResultSet set = database.performQuery(sb.toString(),list,null,null);
int i = 0;
while (i < set.getRowCount())
{
IResultRow row = set.getRow(i++);
Long jobID = (Long)row.getValue(jobs.idField);
waitList.add(jobID);
int status = jobs.stringToStatus(row.getValue(jobs.statusField).toString());
// Make the job wait.
switch (status)
{
case Jobs.STATUS_ACTIVE:
case Jobs.STATUS_ACTIVE_UNINSTALLED:
case Jobs.STATUS_ACTIVE_NOOUTPUT:
case Jobs.STATUS_ACTIVE_NEITHER:
jobs.waitJob(jobID,Jobs.STATUS_ACTIVEWAITING);
if (Logging.jobs.isDebugEnabled())
{
Logging.jobs.debug("Job "+jobID+" now in 'wait' state due to window end");
}
break;
case Jobs.STATUS_ACTIVESEEDING:
case Jobs.STATUS_ACTIVESEEDING_UNINSTALLED:
case Jobs.STATUS_ACTIVESEEDING_NOOUTPUT:
case Jobs.STATUS_ACTIVESEEDING_NEITHER:
jobs.waitJob(jobID,Jobs.STATUS_ACTIVEWAITINGSEEDING);
if (Logging.jobs.isDebugEnabled())
{
Logging.jobs.debug("Job "+jobID+" now in 'wait' state due to window end");
}
break;
case Jobs.STATUS_PAUSED:
jobs.waitJob(jobID,Jobs.STATUS_PAUSEDWAIT);
if (Logging.jobs.isDebugEnabled())
{
Logging.jobs.debug("Job "+jobID+" now in 'wait paused' state due to window end");
}
break;
case Jobs.STATUS_PAUSEDSEEDING:
jobs.waitJob(jobID,Jobs.STATUS_PAUSEDWAITSEEDING);
if (Logging.jobs.isDebugEnabled())
{
Logging.jobs.debug("Job "+jobID+" now in 'wait paused' state due to window end");
}
break;
case Jobs.STATUS_PAUSING:
jobs.waitJob(jobID,Jobs.STATUS_PAUSINGWAITING);
if (Logging.jobs.isDebugEnabled())
{
Logging.jobs.debug("Job "+jobID+" now in 'wait paused' state due to window end");
}
break;
case Jobs.STATUS_PAUSINGSEEDING:
jobs.waitJob(jobID,Jobs.STATUS_PAUSINGWAITINGSEEDING);
if (Logging.jobs.isDebugEnabled())
{
Logging.jobs.debug("Job "+jobID+" now in 'wait paused' state due to window end");
}
break;
default:
break;
}
}
}
catch (ManifoldCFException e)
{
database.signalRollback();
throw e;
}
catch (Error e)
{
database.signalRollback();
throw e;
}
finally
{
database.endTransaction();
}
}
/** Reset job schedule. This re-evaluates whether the job should be started now. This method would typically
* be called after a job's scheduling window has been changed.
*@param jobID is the job identifier.
*/
public void resetJobSchedule(Long jobID)
throws ManifoldCFException
{
// Note: This is problematic; the expected behavior is for the job to start if "we are within the window",
// but not to start if the transition to active status was long enough ago.
// Since there's no "right" way to do this, do nothing for now.
// This explicitly did NOT work - it caused the job to refire every time it was saved.
// jobs.updateLastTime(jobID,0L);
}
/** Check if the specified job parameters have a 'hit' within the specified interval.
*@param startTime is the start time.
*@param currentTimestamp is the end time.
*@param daysOfWeek is the enumerated days of the week, or null.
*@param daysOfMonth is the enumerated days of the month, or null.
*@param months is the enumerated months, or null.
*@param years is the enumerated years, or null.
*@param hours is the enumerated hours, or null.
  *@param minutes is the enumerated minutes, or null.
  *@param timezone is the name of the time zone to evaluate against, or null for the server default.
  *@param duration is the length of the run window in milliseconds, or null if there is none.
  *@return null if there is NO hit within the interval; otherwise the actual time of the hit in milliseconds
* from epoch is returned.
*/
protected static Long checkTimeMatch(long startTime, long currentTimestamp,
EnumeratedValues daysOfWeek,
EnumeratedValues daysOfMonth,
EnumeratedValues months,
EnumeratedValues years,
EnumeratedValues hours,
EnumeratedValues minutes,
String timezone,
Long duration)
{
// What we do here is start with the previous timestamp, and advance until we
// either encounter a match, or we exceed the current timestamp.
Calendar c;
if (timezone == null)
{
c = Calendar.getInstance();
}
else
{
c = Calendar.getInstance(TimeZone.getTimeZone(timezone));
}
// Get the current starting time
c.setTimeInMillis(startTime);
// If there's a duration value, we can't match unless we're within the window.
    // That means we find a match, and then we verify that the end time is greater than the current timestamp.
// If not, we move on (by incrementing)
// The main loop works off of the calendar and these values.
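    // The cascade below rounds the calendar up field by field (milliseconds, seconds,
    // minutes, hours, day-of-week/day-of-month, months, years). An unconstrained field is
    // rolled to its minimum unless a finer-grained field has already "started to care";
    // a constrained field is advanced one unit at a time until it hits a legal value.
    //
    // Worked example (illustrative): hours={2}, minutes={0}, everything else null, with a
    // one-hour duration. The loop lands on the next 02:00:00.000 after startTime; if the
    // window end (03:00) is already <= currentTimestamp, the window has fully passed, and
    // the scan keeps advancing toward the following day's 02:00.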
while (c.getTimeInMillis() < currentTimestamp)
{
      // Round up to the next whole minute (milliseconds first, then seconds), unless already at 0
int x = c.get(Calendar.MILLISECOND);
if (x != c.getMinimum(Calendar.MILLISECOND))
{
int amtToAdd = c.getLeastMaximum(Calendar.MILLISECOND)+1-x;
if (amtToAdd < 1)
amtToAdd = 1;
c.add(Calendar.MILLISECOND,amtToAdd);
continue;
}
x = c.get(Calendar.SECOND);
if (x != c.getMinimum(Calendar.SECOND))
{
int amtToAdd = c.getLeastMaximum(Calendar.SECOND)+1-x;
if (amtToAdd < 1)
amtToAdd = 1;
c.add(Calendar.SECOND,amtToAdd);
continue;
}
boolean startedToCareYet = false;
x = c.get(Calendar.MINUTE);
// If we care about minutes, round up, otherwise go to the 0 value
if (minutes == null)
{
if (x != c.getMinimum(Calendar.MINUTE))
{
int amtToAdd = c.getLeastMaximum(Calendar.MINUTE)+1-x;
if (amtToAdd < 1)
amtToAdd = 1;
c.add(Calendar.MINUTE,amtToAdd);
continue;
}
}
else
{
// See if it is a legit value.
if (!minutes.checkValue(x-c.getMinimum(Calendar.MINUTE)))
{
// Advance to next legit value
// We could be clever, but we just advance one
c.add(Calendar.MINUTE,1);
continue;
}
startedToCareYet = true;
}
// Hours
x = c.get(Calendar.HOUR_OF_DAY);
if (hours == null)
{
if (!startedToCareYet && x != c.getMinimum(Calendar.HOUR_OF_DAY))
{
int amtToAdd = c.getLeastMaximum(Calendar.HOUR_OF_DAY)+1-x;
if (amtToAdd < 1)
amtToAdd = 1;
c.add(Calendar.HOUR_OF_DAY,amtToAdd);
continue;
}
}
else
{
if (!hours.checkValue(x-c.getMinimum(Calendar.HOUR_OF_DAY)))
{
// next hour
c.add(Calendar.HOUR_OF_DAY,1);
continue;
}
startedToCareYet = true;
}
// Days of month and days of week are at the same level;
// these advance concurrently. However, if NEITHER is specified, and nothing
// earlier was, then we do the 1st of the month.
x = c.get(Calendar.DAY_OF_WEEK);
if (daysOfWeek != null)
{
if (!daysOfWeek.checkValue(x-c.getMinimum(Calendar.DAY_OF_WEEK)))
{
// next day
c.add(Calendar.DAY_OF_WEEK,1);
continue;
}
startedToCareYet = true;
}
x = c.get(Calendar.DAY_OF_MONTH);
if (daysOfMonth == null)
{
// If nothing is specified but the month or the year, do it on the 1st.
if (!startedToCareYet && x != c.getMinimum(Calendar.DAY_OF_MONTH))
{
// Move as rapidly as possible towards the first of the month. But in no case, increment
// less than one day.
int amtToAdd = c.getLeastMaximum(Calendar.DAY_OF_MONTH)+1-x;
if (amtToAdd < 1)
amtToAdd = 1;
c.add(Calendar.DAY_OF_MONTH,amtToAdd);
continue;
}
}
else
{
if (!daysOfMonth.checkValue(x-c.getMinimum(Calendar.DAY_OF_MONTH)))
{
// next day
c.add(Calendar.DAY_OF_MONTH,1);
continue;
}
startedToCareYet = true;
}
x = c.get(Calendar.MONTH);
if (months == null)
{
if (!startedToCareYet && x != c.getMinimum(Calendar.MONTH))
{
int amtToAdd = c.getLeastMaximum(Calendar.MONTH)+1-x;
if (amtToAdd < 1)
amtToAdd = 1;
c.add(Calendar.MONTH,amtToAdd);
continue;
}
}
else
{
if (!months.checkValue(x-c.getMinimum(Calendar.MONTH)))
{
c.add(Calendar.MONTH,1);
continue;
}
startedToCareYet = true;
}
x = c.get(Calendar.YEAR);
if (years != null)
{
if (!years.checkValue(x))
{
c.add(Calendar.YEAR,1);
continue;
}
startedToCareYet = true;
}
// Looks like a match.
// Last check is to be sure we are in the window, if any. If we are outside the window,
// must skip forward.
if (duration != null && c.getTimeInMillis() + duration.longValue() <= currentTimestamp)
{
c.add(Calendar.MILLISECOND,c.getLeastMaximum(Calendar.MILLISECOND));
continue;
}
return new Long(c.getTimeInMillis());
}
return null;
}
/** Manually start a job. The specified job will be run REGARDLESS of the timed windows, and
* will not cease until complete. If the job is already running, this operation will assure that
* the job does not pause when its window ends. The job can be manually paused, or manually aborted.
*@param jobID is the ID of the job to start.
*/
public void manualStart(Long jobID)
throws ManifoldCFException
{
manualStart(jobID,false);
}
/** Manually start a job. The specified job will be run REGARDLESS of the timed windows, and
* will not cease until complete. If the job is already running, this operation will assure that
* the job does not pause when its window ends. The job can be manually paused, or manually aborted.
*@param jobID is the ID of the job to start.
*@param requestMinimum is true if a minimal job run is requested.
*/
public void manualStart(Long jobID, boolean requestMinimum)
throws ManifoldCFException
{
database.beginTransaction();
try
{
// First, query the appropriate fields of all jobs.
StringBuilder sb = new StringBuilder("SELECT ");
ArrayList list = new ArrayList();
sb.append(jobs.statusField)
.append(" FROM ").append(jobs.getTableName()).append(" WHERE ")
.append(database.buildConjunctionClause(list,new ClauseDescription[]{
new UnitaryClause(jobs.idField,jobID)}))
.append(" FOR UPDATE");
IResultSet set = database.performQuery(sb.toString(),list,null,null);
if (set.getRowCount() < 1)
throw new ManifoldCFException("No such job: "+jobID);
IResultRow row = set.getRow(0);
int status = jobs.stringToStatus(row.getValue(jobs.statusField).toString());
if (status != Jobs.STATUS_INACTIVE)
throw new ManifoldCFException("Job "+jobID+" is already running");
IJobDescription jobDescription = jobs.load(jobID,true);
if (Logging.jobs.isDebugEnabled())
{
Logging.jobs.debug("Manually starting job "+jobID);
}
// Start this job! but with no end time.
jobs.startJob(jobID,null,requestMinimum);
jobQueue.clearFailTimes(jobID);
if (Logging.jobs.isDebugEnabled())
{
Logging.jobs.debug("Manual job start signal for job "+jobID+" successfully sent");
}
}
catch (ManifoldCFException e)
{
database.signalRollback();
throw e;
}
catch (Error e)
{
database.signalRollback();
throw e;
}
finally
{
database.endTransaction();
}
}
/** Note job delete started.
*@param jobID is the job id.
*@param startTime is the job delete start time.
*/
public void noteJobDeleteStarted(Long jobID, long startTime)
throws ManifoldCFException
{
jobs.noteJobDeleteStarted(jobID,startTime);
if (Logging.jobs.isDebugEnabled())
Logging.jobs.debug("Job "+jobID+" delete is now started");
}
/** Note job started.
*@param jobID is the job id.
*@param startTime is the job start time.
*/
public void noteJobStarted(Long jobID, long startTime)
throws ManifoldCFException
{
jobs.noteJobStarted(jobID,startTime);
if (Logging.jobs.isDebugEnabled())
Logging.jobs.debug("Job "+jobID+" is now started");
}
/** Note job seeded.
*@param jobID is the job id.
*@param seedTime is the job seed time.
*/
public void noteJobSeeded(Long jobID, long seedTime)
throws ManifoldCFException
{
jobs.noteJobSeeded(jobID,seedTime);
if (Logging.jobs.isDebugEnabled())
Logging.jobs.debug("Job "+jobID+" has been successfully reseeded");
}
/** Prepare for a delete scan.
*@param jobID is the job id.
*/
public void prepareDeleteScan(Long jobID)
throws ManifoldCFException
{
// No special treatment needed for hopcount or carrydown, since these all get deleted at once
// at the end of the job delete process.
TrackerClass.notePrecommit();
jobQueue.prepareDeleteScan(jobID);
TrackerClass.noteCommit();
}
/** Prepare a job to be run.
* This method is called regardless of the details of the job; what differs is only the flags that are passed in.
* The code inside will determine the appropriate procedures.
* (This method replaces prepareFullScan() and prepareIncrementalScan(). )
*@param jobID is the job id.
*@param legalLinkTypes are the link types allowed for the job.
*@param hopcountMethod describes how to handle deletions for hopcount purposes.
*@param connectorModel is the model used by the connector for the job.
*@param continuousJob is true if the job is a continuous one.
*@param fromBeginningOfTime is true if the job is running starting from time 0.
*@param requestMinimum is true if the minimal amount of work is requested for the job run.
*/
public void prepareJobScan(Long jobID, String[] legalLinkTypes, int hopcountMethod,
int connectorModel, boolean continuousJob, boolean fromBeginningOfTime,
boolean requestMinimum)
throws ManifoldCFException
{
// (1) If the connector has MODEL_ADD_CHANGE_DELETE, then
// we let the connector run the show; there's no purge phase, and therefore the
// documents are left in a COMPLETED state if they don't show up in the list
// of seeds that require the attention of the connector. However, we do need to
// preload the queue with all the existing documents, if there was any change to the
// specification information (which will mean that fromBeginningOfTime is set).
//
// (2) If the connector has MODEL_ALL, then it's a full crawl no matter what, so
// we do a full scan initialization.
//
// (3) If the connector has some other model, we look at the start time. A start
// time of 0 implies a full scan, while any other start time implies an incremental
// scan.
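    // Summarizing the dispatch below:
    //
    //   MODEL_ADD_CHANGE_DELETE         -> queueAllExisting() if fromBeginningOfTime, else nothing
    //   MODEL_CHAINED_ADD_CHANGE_DELETE -> queueAllExisting() if fromBeginningOfTime, else preparePartialScan()
    //   requestMinimum, not MODEL_ALL,
    //     not from beginning of time    -> preparePartialScan() for chained-add models, else nothing
    //   everything else                 -> prepareFullScan() for non-continuous full crawls,
    //                                      prepareIncrementalScan() otherwise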
// Complete connector model is told everything, so no delete phase.
if (connectorModel == IRepositoryConnector.MODEL_ADD_CHANGE_DELETE)
{
if (fromBeginningOfTime)
queueAllExisting(jobID,legalLinkTypes);
return;
}
// If the connector model is complete via chaining, then we just need to make
// sure discovery works to queue the changes.
if (connectorModel == IRepositoryConnector.MODEL_CHAINED_ADD_CHANGE_DELETE)
{
if (fromBeginningOfTime)
queueAllExisting(jobID,legalLinkTypes);
else
jobQueue.preparePartialScan(jobID);
return;
}
    // Similarly, a minimal crawl attempts no delete phase, unless the connector explicitly forbids it or
    // the job criteria have changed.
if (requestMinimum && connectorModel != IRepositoryConnector.MODEL_ALL && !fromBeginningOfTime)
{
// If it is a chained model, do the partial prep.
if (connectorModel == IRepositoryConnector.MODEL_CHAINED_ADD ||
connectorModel == IRepositoryConnector.MODEL_CHAINED_ADD_CHANGE)
jobQueue.preparePartialScan(jobID);
return;
}
if (!continuousJob && connectorModel != IRepositoryConnector.MODEL_PARTIAL &&
(connectorModel == IRepositoryConnector.MODEL_ALL || fromBeginningOfTime))
prepareFullScan(jobID,legalLinkTypes,hopcountMethod);
else
jobQueue.prepareIncrementalScan(jobID);
}
/** Queue all existing.
*@param jobID is the job id.
*@param legalLinkTypes are the link types allowed for the job.
*/
protected void queueAllExisting(Long jobID, String[] legalLinkTypes)
throws ManifoldCFException
{
while (true)
{
long sleepAmt = 0L;
database.beginTransaction();
try
{
if (legalLinkTypes.length > 0)
{
jobQueue.reactivateHopcountRemovedRecords(jobID);
}
jobQueue.queueAllExisting(jobID);
TrackerClass.notePrecommit();
database.performCommit();
TrackerClass.noteCommit();
break;
}
catch (ManifoldCFException e)
{
database.signalRollback();
TrackerClass.noteRollback();
if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT)
{
if (Logging.perf.isDebugEnabled())
Logging.perf.debug("Aborted transaction during queueAllExisting: "+e.getMessage());
sleepAmt = getRandomAmount();
continue;
}
throw e;
}
catch (Error e)
{
database.signalRollback();
TrackerClass.noteRollback();
throw e;
}
finally
{
database.endTransaction();
sleepFor(sleepAmt);
}
}
}
/** Prepare for a full scan.
*@param jobID is the job id.
*@param legalLinkTypes are the link types allowed for the job.
*@param hopcountMethod describes how to handle deletions for hopcount purposes.
*/
protected void prepareFullScan(Long jobID, String[] legalLinkTypes, int hopcountMethod)
throws ManifoldCFException
{
while (true)
{
long sleepAmt = 0L;
// Since we delete documents here, we need to manage the hopcount part of the world too.
database.beginTransaction(database.TRANSACTION_SERIALIZED);
try
{
// Delete the documents we have never fetched, including any hopcount records we've calculated.
if (legalLinkTypes.length > 0)
{
ArrayList list = new ArrayList();
String query = database.buildConjunctionClause(list,new ClauseDescription[]{
new MultiClause("t99."+jobQueue.statusField,new Object[]{
jobQueue.statusToString(jobQueue.STATUS_PENDING),
jobQueue.statusToString(jobQueue.STATUS_HOPCOUNTREMOVED)})});
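        // The "t99" alias lets deleteMatchingDocuments() correlate its hopcount deletions
        // with exactly the jobqueue rows for this job that are still in PENDING or
        // HOPCOUNTREMOVED state - that is, documents that were queued but never fetched.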
hopCount.deleteMatchingDocuments(jobID,legalLinkTypes,jobQueue.getTableName()+" t99",
"t99."+jobQueue.docHashField,"t99."+jobQueue.jobIDField,
query,list,
hopcountMethod);
}
jobQueue.prepareFullScan(jobID);
TrackerClass.notePrecommit();
database.performCommit();
TrackerClass.noteCommit();
break;
}
catch (ManifoldCFException e)
{
database.signalRollback();
TrackerClass.noteRollback();
if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT)
{
if (Logging.perf.isDebugEnabled())
Logging.perf.debug("Aborted transaction preparing full scan: "+e.getMessage());
sleepAmt = getRandomAmount();
continue;
}
throw e;
}
catch (Error e)
{
database.signalRollback();
TrackerClass.noteRollback();
throw e;
}
finally
{
database.endTransaction();
sleepFor(sleepAmt);
}
}
}
/** Manually abort a running job. The job will be permanently stopped, and will not run again until
* automatically started based on schedule, or manually started.
*@param jobID is the job to abort.
*/
public void manualAbort(Long jobID)
throws ManifoldCFException
{
// Just whack status back to "INACTIVE". The active documents will continue to be processed until done,
// but that's fine. There will be no finishing stage, obviously.
if (Logging.jobs.isDebugEnabled())
{
Logging.jobs.debug("Manually aborting job "+jobID);
}
while (true)
{
long sleepAmt = 0L;
database.beginTransaction();
try
{
jobs.abortJob(jobID,null);
database.performCommit();
break;
}
catch (ManifoldCFException e)
{
database.signalRollback();
if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT)
{
if (Logging.perf.isDebugEnabled())
Logging.perf.debug("Aborted transaction aborting job: "+e.getMessage());
sleepAmt = getRandomAmount();
continue;
}
throw e;
}
catch (Error e)
{
database.signalRollback();
throw e;
}
finally
{
database.endTransaction();
sleepFor(sleepAmt);
}
}
if (Logging.jobs.isDebugEnabled())
{
Logging.jobs.debug("Job "+jobID+" abort signal successfully sent");
}
}
/** Manually restart a running job. The job will be stopped and restarted. Any schedule affinity will be lost,
* until the job finishes on its own.
*@param jobID is the job to abort.
*@param requestMinimum is true if a minimal job run is requested.
*/
public void manualAbortRestart(Long jobID, boolean requestMinimum)
throws ManifoldCFException
{
if (Logging.jobs.isDebugEnabled())
{
Logging.jobs.debug("Manually restarting job "+jobID);
}
while (true)
{
long sleepAmt = 0L;
database.beginTransaction();
try
{
jobs.abortRestartJob(jobID,requestMinimum);
database.performCommit();
break;
}
catch (ManifoldCFException e)
{
database.signalRollback();
if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT)
{
if (Logging.perf.isDebugEnabled())
Logging.perf.debug("Aborted transaction restarting job: "+e.getMessage());
sleepAmt = getRandomAmount();
continue;
}
throw e;
}
catch (Error e)
{
database.signalRollback();
throw e;
}
finally
{
database.endTransaction();
sleepFor(sleepAmt);
}
}
if (Logging.jobs.isDebugEnabled())
{
Logging.jobs.debug("Job "+jobID+" restart signal successfully sent");
}
}
/** Manually restart a running job. The job will be stopped and restarted. Any schedule affinity will be lost,
* until the job finishes on its own.
*@param jobID is the job to abort.
*/
public void manualAbortRestart(Long jobID)
throws ManifoldCFException
{
manualAbortRestart(jobID,false);
}
/** Abort a running job due to a fatal error condition.
*@param jobID is the job to abort.
*@param errorText is the error text.
*@return true if this is the first logged abort request for this job.
*/
public boolean errorAbort(Long jobID, String errorText)
throws ManifoldCFException
{
// Just whack status back to "INACTIVE". The active documents will continue to be processed until done,
// but that's fine. There will be no finishing stage, obviously.
if (Logging.jobs.isDebugEnabled())
{
Logging.jobs.debug("Aborting job "+jobID+" due to error '"+errorText+"'");
}
boolean rval;
while (true)
{
long sleepAmt = 0L;
database.beginTransaction();
try
{
rval = jobs.abortJob(jobID,errorText);
database.performCommit();
break;
}
catch (ManifoldCFException e)
{
database.signalRollback();
if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT)
{
if (Logging.perf.isDebugEnabled())
Logging.perf.debug("Aborted transaction aborting job: "+e.getMessage());
sleepAmt = getRandomAmount();
continue;
}
throw e;
}
catch (Error e)
{
database.signalRollback();
throw e;
}
finally
{
database.endTransaction();
sleepFor(sleepAmt);
}
}
if (rval && Logging.jobs.isDebugEnabled())
{
Logging.jobs.debug("Job "+jobID+" abort signal successfully sent");
}
return rval;
}
/** Pause a job.
*@param jobID is the job identifier to pause.
*/
public void pauseJob(Long jobID)
throws ManifoldCFException
{
if (Logging.jobs.isDebugEnabled())
{
Logging.jobs.debug("Manually pausing job "+jobID);
}
while (true)
{
long sleepAmt = 0L;
database.beginTransaction();
try
{
jobs.pauseJob(jobID);
database.performCommit();
break;
}
catch (ManifoldCFException e)
{
database.signalRollback();
if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT)
{
if (Logging.perf.isDebugEnabled())
Logging.perf.debug("Aborted transaction pausing job: "+e.getMessage());
sleepAmt = getRandomAmount();
continue;
}
throw e;
}
catch (Error e)
{
database.signalRollback();
throw e;
}
finally
{
database.endTransaction();
sleepFor(sleepAmt);
}
}
if (Logging.jobs.isDebugEnabled())
{
Logging.jobs.debug("Job "+jobID+" successfully paused");
}
}
/** Restart a paused job.
*@param jobID is the job identifier to restart.
*/
public void restartJob(Long jobID)
throws ManifoldCFException
{
if (Logging.jobs.isDebugEnabled())
{
Logging.jobs.debug("Manually restarting paused job "+jobID);
}
while (true)
{
long sleepAmt = 0L;
database.beginTransaction();
try
{
jobs.restartJob(jobID);
jobQueue.clearFailTimes(jobID);
database.performCommit();
break;
}
catch (ManifoldCFException e)
{
database.signalRollback();
if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT)
{
if (Logging.perf.isDebugEnabled())
Logging.perf.debug("Aborted transaction restarting pausing job: "+e.getMessage());
sleepAmt = getRandomAmount();
continue;
}
throw e;
}
catch (Error e)
{
database.signalRollback();
throw e;
}
finally
{
database.endTransaction();
sleepFor(sleepAmt);
}
}
if (Logging.jobs.isDebugEnabled())
{
Logging.jobs.debug("Job "+jobID+" successfully restarted");
}
}
/** Get the list of jobs that are ready for seeding.
  *@param currentTime is the current time in milliseconds since epoch.
  *@return jobs that are active and are running in adaptive mode. These will be seeded
  * based on what the connector says should be added to the queue.
  */
public JobSeedingRecord[] getJobsReadyForSeeding(long currentTime)
throws ManifoldCFException
{
while (true)
{
long sleepAmt = 0L;
database.beginTransaction();
try
{
// Do the query
StringBuilder sb = new StringBuilder("SELECT ");
ArrayList list = new ArrayList();
sb.append(jobs.idField).append(",")
.append(jobs.lastCheckTimeField).append(",")
.append(jobs.reseedIntervalField)
.append(" FROM ").append(jobs.getTableName()).append(" WHERE ")
.append(database.buildConjunctionClause(list,new ClauseDescription[]{
new UnitaryClause(jobs.statusField,jobs.statusToString(jobs.STATUS_ACTIVE))})).append(" AND ")
.append(jobs.typeField).append("=? AND ")
.append("(").append(jobs.reseedTimeField).append(" IS NULL OR ").append(jobs.reseedTimeField).append("<=?)")
.append(" FOR UPDATE");
list.add(jobs.typeToString(jobs.TYPE_CONTINUOUS));
list.add(new Long(currentTime));
IResultSet set = database.performQuery(sb.toString(),list,null,null);
// Update them all
JobSeedingRecord[] rval = new JobSeedingRecord[set.getRowCount()];
int i = 0;
while (i < rval.length)
{
IResultRow row = set.getRow(i);
Long jobID = (Long)row.getValue(jobs.idField);
Long x = (Long)row.getValue(jobs.lastCheckTimeField);
long synchTime = 0;
if (x != null)
synchTime = x.longValue();
Long r = (Long)row.getValue(jobs.reseedIntervalField);
Long reseedTime;
if (r != null)
reseedTime = new Long(currentTime + r.longValue());
else
reseedTime = null;
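          // Illustrative arithmetic: a reseed interval of 3600000 ms (one hour) schedules
          // the next reseed at currentTime + 3600000. A null interval leaves reseedTime
          // null, which the selection query above treats as always eligible.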
// Mark status of job as "active/seeding". Special status is needed so that abort
// will not complete until seeding is completed.
jobs.writeStatus(jobID,jobs.STATUS_ACTIVESEEDING,reseedTime);
if (Logging.jobs.isDebugEnabled())
{
Logging.jobs.debug("Marked job "+jobID+" for seeding");
}
rval[i] = new JobSeedingRecord(jobID,synchTime);
i++;
}
database.performCommit();
return rval;
}
catch (ManifoldCFException e)
{
database.signalRollback();
if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT)
{
if (Logging.perf.isDebugEnabled())
Logging.perf.debug("Aborted getting jobs ready for seeding: "+e.getMessage());
sleepAmt = getRandomAmount();
continue;
}
throw e;
}
catch (Error e)
{
database.signalRollback();
throw e;
}
finally
{
database.endTransaction();
sleepFor(sleepAmt);
}
}
}
/** Get the list of jobs that are ready for deletion.
*@return jobs that were in the "readyfordelete" state.
*/
public JobDeleteRecord[] getJobsReadyForDelete()
throws ManifoldCFException
{
while (true)
{
long sleepAmt = 0L;
database.beginTransaction();
try
{
// Do the query
StringBuilder sb = new StringBuilder("SELECT ");
ArrayList list = new ArrayList();
sb.append(jobs.idField).append(" FROM ").append(jobs.getTableName()).append(" WHERE ")
.append(database.buildConjunctionClause(list,new ClauseDescription[]{
new UnitaryClause(jobs.statusField,jobs.statusToString(jobs.STATUS_READYFORDELETE))}))
.append(" FOR UPDATE");
IResultSet set = database.performQuery(sb.toString(),list,null,null);
// Update them all
JobDeleteRecord[] rval = new JobDeleteRecord[set.getRowCount()];
int i = 0;
while (i < rval.length)
{
IResultRow row = set.getRow(i);
Long jobID = (Long)row.getValue(jobs.idField);
// Mark status of job as "starting delete"
jobs.writeStatus(jobID,jobs.STATUS_DELETESTARTINGUP);
if (Logging.jobs.isDebugEnabled())
{
Logging.jobs.debug("Marked job "+jobID+" for delete startup");
}
rval[i] = new JobDeleteRecord(jobID);
i++;
}
database.performCommit();
return rval;
}
catch (ManifoldCFException e)
{
database.signalRollback();
if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT)
{
if (Logging.perf.isDebugEnabled())
Logging.perf.debug("Aborted getting jobs ready for startup: "+e.getMessage());
sleepAmt = getRandomAmount();
continue;
}
throw e;
}
catch (Error e)
{
database.signalRollback();
throw e;
}
finally
{
database.endTransaction();
sleepFor(sleepAmt);
}
}
}
/** Get the list of jobs that are ready for startup.
*@return jobs that were in the "readyforstartup" state. These will be marked as being in the "starting up" state.
*/
public JobStartRecord[] getJobsReadyForStartup()
throws ManifoldCFException
{
while (true)
{
long sleepAmt = 0L;
database.beginTransaction();
try
{
// Do the query
StringBuilder sb = new StringBuilder("SELECT ");
ArrayList list = new ArrayList();
sb.append(jobs.idField).append(",")
.append(jobs.lastCheckTimeField).append(",")
.append(jobs.statusField)
.append(" FROM ").append(jobs.getTableName()).append(" WHERE ")
.append(database.buildConjunctionClause(list,new ClauseDescription[]{
new MultiClause(jobs.statusField,new Object[]{
jobs.statusToString(jobs.STATUS_READYFORSTARTUP),
jobs.statusToString(jobs.STATUS_READYFORSTARTUPMINIMAL)})}))
.append(" FOR UPDATE");
IResultSet set = database.performQuery(sb.toString(),list,null,null);
// Update them all
JobStartRecord[] rval = new JobStartRecord[set.getRowCount()];
int i = 0;
while (i < rval.length)
{
IResultRow row = set.getRow(i);
Long jobID = (Long)row.getValue(jobs.idField);
Long x = (Long)row.getValue(jobs.lastCheckTimeField);
int status = jobs.stringToStatus((String)row.getValue(jobs.statusField));
boolean requestMinimum = (status == jobs.STATUS_READYFORSTARTUPMINIMAL);
long synchTime = 0;
if (x != null)
synchTime = x.longValue();
// Mark status of job as "starting"
jobs.writeStatus(jobID,requestMinimum?jobs.STATUS_STARTINGUPMINIMAL:jobs.STATUS_STARTINGUP);
if (Logging.jobs.isDebugEnabled())
{
Logging.jobs.debug("Marked job "+jobID+" for startup");
}
rval[i] = new JobStartRecord(jobID,synchTime,requestMinimum);
i++;
}
database.performCommit();
return rval;
}
catch (ManifoldCFException e)
{
database.signalRollback();
if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT)
{
if (Logging.perf.isDebugEnabled())
Logging.perf.debug("Aborted getting jobs ready for startup: "+e.getMessage());
sleepAmt = getRandomAmount();
continue;
}
throw e;
}
catch (Error e)
{
database.signalRollback();
throw e;
}
finally
{
database.endTransaction();
sleepFor(sleepAmt);
}
}
}
/** Inactivate a job, from the notification state.
*@param jobID is the ID of the job to inactivate.
*/
public void inactivateJob(Long jobID)
throws ManifoldCFException
{
    // While there is no flow that should leave the job in the wrong state when this gets called,
    // as a precaution the status change below is done inside a transaction, with the state checked first.
while (true)
{
long sleepAmt = 0L;
database.beginTransaction();
try
{
// Check job status
StringBuilder sb = new StringBuilder("SELECT ");
ArrayList list = new ArrayList();
sb.append(jobs.statusField).append(" FROM ").append(jobs.getTableName()).append(" WHERE ")
.append(database.buildConjunctionClause(list,new ClauseDescription[]{
new UnitaryClause(jobs.idField,jobID)}))
.append(" FOR UPDATE");
IResultSet set = database.performQuery(sb.toString(),list,null,null);
if (set.getRowCount() == 0)
throw new ManifoldCFException("No such job: "+jobID);
IResultRow row = set.getRow(0);
int status = jobs.stringToStatus((String)row.getValue(jobs.statusField));
switch (status)
{
case Jobs.STATUS_NOTIFYINGOFCOMPLETION:
jobs.notificationComplete(jobID);
break;
default:
throw new ManifoldCFException("Unexpected job status: "+Integer.toString(status));
}
database.performCommit();
return;
}
catch (ManifoldCFException e)
{
database.signalRollback();
if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT)
{
if (Logging.perf.isDebugEnabled())
Logging.perf.debug("Aborted clearing notification state for job: "+e.getMessage());
sleepAmt = getRandomAmount();
continue;
}
throw e;
}
catch (Error e)
{
database.signalRollback();
throw e;
}
finally
{
database.endTransaction();
sleepFor(sleepAmt);
}
}
}
/** Reset a job starting for delete back to "ready for delete"
* state.
*@param jobID is the job id.
*/
public void resetStartDeleteJob(Long jobID)
throws ManifoldCFException
{
while (true)
{
long sleepAmt = 0L;
database.beginTransaction();
try
{
// Check job status
StringBuilder sb = new StringBuilder("SELECT ");
ArrayList list = new ArrayList();
sb.append(jobs.statusField).append(" FROM ").append(jobs.getTableName()).append(" WHERE ")
.append(database.buildConjunctionClause(list,new ClauseDescription[]{
new UnitaryClause(jobs.idField,jobID)}))
.append(" FOR UPDATE");
IResultSet set = database.performQuery(sb.toString(),list,null,null);
if (set.getRowCount() == 0)
throw new ManifoldCFException("No such job: "+jobID);
IResultRow row = set.getRow(0);
int status = jobs.stringToStatus((String)row.getValue(jobs.statusField));
switch (status)
{
case Jobs.STATUS_DELETESTARTINGUP:
if (Logging.jobs.isDebugEnabled())
Logging.jobs.debug("Setting job "+jobID+" back to 'ReadyForDelete' state");
// Set the state of the job back to "ReadyForStartup"
jobs.writeStatus(jobID,jobs.STATUS_READYFORDELETE);
break;
default:
throw new ManifoldCFException("Unexpected job status: "+Integer.toString(status));
}
database.performCommit();
return;
}
catch (ManifoldCFException e)
{
database.signalRollback();
if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT)
{
if (Logging.perf.isDebugEnabled())
Logging.perf.debug("Aborted resetting start delete job: "+e.getMessage());
sleepAmt = getRandomAmount();
continue;
}
throw e;
}
catch (Error e)
{
database.signalRollback();
throw e;
}
finally
{
database.endTransaction();
sleepFor(sleepAmt);
}
}
}
/** Reset a job that is notifying back to "ready for notify"
* state.
*@param jobID is the job id.
*/
public void resetNotifyJob(Long jobID)
throws ManifoldCFException
{
while (true)
{
long sleepAmt = 0L;
database.beginTransaction();
try
{
// Check job status
StringBuilder sb = new StringBuilder("SELECT ");
ArrayList list = new ArrayList();
sb.append(jobs.statusField).append(" FROM ").append(jobs.getTableName()).append(" WHERE ")
.append(database.buildConjunctionClause(list,new ClauseDescription[]{
new UnitaryClause(jobs.idField,jobID)}))
.append(" FOR UPDATE");
IResultSet set = database.performQuery(sb.toString(),list,null,null);
if (set.getRowCount() == 0)
throw new ManifoldCFException("No such job: "+jobID);
IResultRow row = set.getRow(0);
int status = jobs.stringToStatus((String)row.getValue(jobs.statusField));
switch (status)
{
case Jobs.STATUS_NOTIFYINGOFCOMPLETION:
if (Logging.jobs.isDebugEnabled())
Logging.jobs.debug("Setting job "+jobID+" back to 'ReadyForNotify' state");
// Set the state of the job back to "ReadyForNotify"
jobs.writeStatus(jobID,jobs.STATUS_READYFORNOTIFY);
break;
default:
throw new ManifoldCFException("Unexpected job status: "+Integer.toString(status));
}
database.performCommit();
return;
}
catch (ManifoldCFException e)
{
database.signalRollback();
if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT)
{
if (Logging.perf.isDebugEnabled())
Logging.perf.debug("Aborted resetting notify job: "+e.getMessage());
sleepAmt = getRandomAmount();
continue;
}
throw e;
}
catch (Error e)
{
database.signalRollback();
throw e;
}
finally
{
database.endTransaction();
sleepFor(sleepAmt);
}
}
}
/** Reset a starting job back to "ready for startup" state.
*@param jobID is the job id.
*/
public void resetStartupJob(Long jobID)
throws ManifoldCFException
{
while (true)
{
long sleepAmt = 0L;
database.beginTransaction();
try
{
// Check job status
StringBuilder sb = new StringBuilder("SELECT ");
ArrayList list = new ArrayList();
sb.append(jobs.statusField).append(" FROM ").append(jobs.getTableName()).append(" WHERE ")
.append(database.buildConjunctionClause(list,new ClauseDescription[]{
new UnitaryClause(jobs.idField,jobID)}))
.append(" FOR UPDATE");
IResultSet set = database.performQuery(sb.toString(),list,null,null);
if (set.getRowCount() == 0)
throw new ManifoldCFException("No such job: "+jobID);
IResultRow row = set.getRow(0);
int status = jobs.stringToStatus((String)row.getValue(jobs.statusField));
switch (status)
{
case Jobs.STATUS_STARTINGUP:
if (Logging.jobs.isDebugEnabled())
Logging.jobs.debug("Setting job "+jobID+" back to 'ReadyForStartup' state");
// Set the state of the job back to "ReadyForStartup"
jobs.writeStatus(jobID,jobs.STATUS_READYFORSTARTUP);
break;
case Jobs.STATUS_STARTINGUPMINIMAL:
if (Logging.jobs.isDebugEnabled())
Logging.jobs.debug("Setting job "+jobID+" back to 'ReadyForStartupMinimal' state");
// Set the state of the job back to "ReadyForStartupMinimal"
jobs.writeStatus(jobID,jobs.STATUS_READYFORSTARTUPMINIMAL);
break;
case Jobs.STATUS_ABORTINGSTARTINGUP:
case Jobs.STATUS_ABORTINGSTARTINGUPMINIMAL:
if (Logging.jobs.isDebugEnabled())
Logging.jobs.debug("Setting job "+jobID+" to 'Aborting' state");
jobs.writeStatus(jobID,jobs.STATUS_ABORTING);
break;
case Jobs.STATUS_ABORTINGSTARTINGUPFORRESTART:
if (Logging.jobs.isDebugEnabled())
Logging.jobs.debug("Setting job "+jobID+" to 'AbortingForRestart' state");
jobs.writeStatus(jobID,jobs.STATUS_ABORTINGFORRESTART);
break;
case Jobs.STATUS_ABORTINGSTARTINGUPFORRESTARTMINIMAL:
if (Logging.jobs.isDebugEnabled())
Logging.jobs.debug("Setting job "+jobID+" to 'AbortingForRestartMinimal' state");
jobs.writeStatus(jobID,jobs.STATUS_ABORTINGFORRESTARTMINIMAL);
break;
case Jobs.STATUS_READYFORSTARTUP:
case Jobs.STATUS_READYFORSTARTUPMINIMAL:
case Jobs.STATUS_ABORTING:
case Jobs.STATUS_ABORTINGFORRESTART:
case Jobs.STATUS_ABORTINGFORRESTARTMINIMAL:
// ok
break;
default:
throw new ManifoldCFException("Unexpected job status: "+Integer.toString(status));
}
database.performCommit();
return;
}
catch (ManifoldCFException e)
{
database.signalRollback();
if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT)
{
if (Logging.perf.isDebugEnabled())
Logging.perf.debug("Aborted resetting startup job: "+e.getMessage());
sleepAmt = getRandomAmount();
continue;
}
throw e;
}
catch (Error e)
{
database.signalRollback();
throw e;
}
finally
{
database.endTransaction();
sleepFor(sleepAmt);
}
}
}
/** Reset a seeding job back to "active" state.
*@param jobID is the job id.
*/
public void resetSeedJob(Long jobID)
throws ManifoldCFException
{
while (true)
{
long sleepAmt = 0L;
database.beginTransaction();
try
{
// Check job status
StringBuilder sb = new StringBuilder("SELECT ");
ArrayList list = new ArrayList();
sb.append(jobs.statusField).append(" FROM ").append(jobs.getTableName()).append(" WHERE ")
.append(database.buildConjunctionClause(list,new ClauseDescription[]{
new UnitaryClause(jobs.idField,jobID)}))
.append(" FOR UPDATE");
IResultSet set = database.performQuery(sb.toString(),list,null,null);
if (set.getRowCount() == 0)
throw new ManifoldCFException("No such job: "+jobID);
IResultRow row = set.getRow(0);
int status = jobs.stringToStatus((String)row.getValue(jobs.statusField));
switch (status)
{
case Jobs.STATUS_ACTIVESEEDING_UNINSTALLED:
if (Logging.jobs.isDebugEnabled())
Logging.jobs.debug("Setting job "+jobID+" back to 'Active_Uninstalled' state");
// Set the state of the job back to "Active_Uninstalled"
jobs.writeStatus(jobID,jobs.STATUS_ACTIVE_UNINSTALLED);
break;
case Jobs.STATUS_ACTIVESEEDING_NOOUTPUT:
if (Logging.jobs.isDebugEnabled())
Logging.jobs.debug("Setting job "+jobID+" back to 'Active_NoOutput' state");
// Set the state of the job back to "Active_NoOutput"
jobs.writeStatus(jobID,jobs.STATUS_ACTIVE_NOOUTPUT);
break;
case Jobs.STATUS_ACTIVESEEDING_NEITHER:
if (Logging.jobs.isDebugEnabled())
Logging.jobs.debug("Setting job "+jobID+" back to 'Active_Neither' state");
// Set the state of the job back to "Active_Neither"
jobs.writeStatus(jobID,jobs.STATUS_ACTIVE_NEITHER);
break;
case Jobs.STATUS_ACTIVESEEDING:
if (Logging.jobs.isDebugEnabled())
Logging.jobs.debug("Setting job "+jobID+" back to 'Active' state");
// Set the state of the job back to "Active"
jobs.writeStatus(jobID,jobs.STATUS_ACTIVE);
break;
case Jobs.STATUS_ACTIVEWAITSEEDING:
if (Logging.jobs.isDebugEnabled())
Logging.jobs.debug("Setting job "+jobID+" back to 'ActiveWait' state");
// Set the state of the job back to "ActiveWait"
jobs.writeStatus(jobID,jobs.STATUS_ACTIVEWAIT);
break;
case Jobs.STATUS_PAUSEDSEEDING:
if (Logging.jobs.isDebugEnabled())
Logging.jobs.debug("Setting job "+jobID+" back to 'Paused' state");
// Set the state of the job back to "Paused"
jobs.writeStatus(jobID,jobs.STATUS_PAUSED);
break;
case Jobs.STATUS_PAUSEDWAITSEEDING:
if (Logging.jobs.isDebugEnabled())
Logging.jobs.debug("Setting job "+jobID+" back to 'PausedWait' state");
// Set the state of the job back to "PausedWait"
jobs.writeStatus(jobID,jobs.STATUS_PAUSEDWAIT);
break;
case Jobs.STATUS_ABORTINGSEEDING:
if (Logging.jobs.isDebugEnabled())
Logging.jobs.debug("Setting job "+jobID+" back to 'Aborting' state");
// Set the state of the job back to "Aborting"
jobs.writeStatus(jobID,jobs.STATUS_ABORTING);
break;
case Jobs.STATUS_ABORTINGFORRESTARTSEEDING:
if (Logging.jobs.isDebugEnabled())
Logging.jobs.debug("Setting job "+jobID+" back to 'AbortingForRestart' state");
// Set the state of the job back to "AbortingForRestart"
jobs.writeStatus(jobID,jobs.STATUS_ABORTINGFORRESTART);
break;
case Jobs.STATUS_ABORTINGFORRESTARTSEEDINGMINIMAL:
if (Logging.jobs.isDebugEnabled())
Logging.jobs.debug("Setting job "+jobID+" back to 'AbortingForRestartMinimal' state");
// Set the state of the job back to "AbortingForRestartMinimal"
jobs.writeStatus(jobID,jobs.STATUS_ABORTINGFORRESTARTMINIMAL);
break;
case Jobs.STATUS_ABORTING:
case Jobs.STATUS_ABORTINGFORRESTART:
case Jobs.STATUS_ABORTINGFORRESTARTMINIMAL:
case Jobs.STATUS_ACTIVE:
case Jobs.STATUS_ACTIVE_UNINSTALLED:
case Jobs.STATUS_ACTIVE_NOOUTPUT:
case Jobs.STATUS_ACTIVE_NEITHER:
case Jobs.STATUS_PAUSED:
case Jobs.STATUS_ACTIVEWAIT:
case Jobs.STATUS_PAUSEDWAIT:
// ok
break;
default:
throw new ManifoldCFException("Unexpected job status: "+Integer.toString(status));
}
database.performCommit();
return;
}
catch (ManifoldCFException e)
{
database.signalRollback();
if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT)
{
if (Logging.perf.isDebugEnabled())
Logging.perf.debug("Aborted resetting seeding job: "+e.getMessage());
sleepAmt = getRandomAmount();
continue;
}
throw e;
}
catch (Error e)
{
database.signalRollback();
throw e;
}
finally
{
database.endTransaction();
sleepFor(sleepAmt);
}
}
}
/** Delete jobs in need of being deleted (which are marked "ready for delete").
* This method is meant to be called periodically to perform delete processing on jobs.
*/
public void deleteJobsReadyForDelete()
throws ManifoldCFException
{
while (true)
{
long sleepAmt = 0L;
// This method must find only jobs that have nothing hanging around in their jobqueue that represents an ingested
// document. Any jobqueue entries which are in a state to interfere with the delete will be cleaned up by other
// threads, so eventually a job will become eligible. This happens when no records remain in the states
// checked below: eligible-for-delete or being-deleted.
database.beginTransaction();
try
{
// The original query was:
//
// SELECT id FROM jobs t0 WHERE status='D' AND NOT EXISTS(SELECT 'x' FROM jobqueue t1 WHERE t0.id=t1.jobid AND
// t1.status IN ('C', 'F', 'G'))
//
// However, this did not work well with Postgres when the tables got big. So I revised things to do the following multi-stage process:
// (1) The query should be broken up, such that n queries are done:
// (a) the first one should get all candidate jobs (those that have the right state)
// (b) there should be a query for each job of roughly this form: SELECT id FROM jobqueue WHERE jobid=xxx AND status IN (...) LIMIT 1
// This will work much better than what postgresql currently does, because neither the cost-based analysis nor the actual NOT clause seems to allow
// early exit!
// Do the first query, getting the candidate jobs to be considered
StringBuilder sb = new StringBuilder("SELECT ");
ArrayList list = new ArrayList();
sb.append(jobs.idField).append(" FROM ").append(jobs.getTableName()).append(" WHERE ")
.append(database.buildConjunctionClause(list,new ClauseDescription[]{
new UnitaryClause(jobs.statusField,jobs.statusToString(jobs.STATUS_DELETING))}))
.append(" FOR UPDATE");
IResultSet set = database.performQuery(sb.toString(),list,null,null);
// Now, loop through this list. For each one, verify that it's okay to delete it
int i = 0;
while (i < set.getRowCount())
{
IResultRow row = set.getRow(i++);
Long jobID = (Long)row.getValue(jobs.idField);
list.clear();
sb = new StringBuilder("SELECT ");
sb.append(jobQueue.idField).append(" FROM ").append(jobQueue.getTableName()).append(" WHERE ")
.append(database.buildConjunctionClause(list,new ClauseDescription[]{
new UnitaryClause(jobQueue.jobIDField,jobID),
new MultiClause(jobQueue.statusField,new Object[]{
jobQueue.statusToString(jobQueue.STATUS_ELIGIBLEFORDELETE),
jobQueue.statusToString(jobQueue.STATUS_BEINGDELETED)})}))
.append(" ").append(database.constructOffsetLimitClause(0,1));
IResultSet confirmSet = database.performQuery(sb.toString(),list,null,null,1,null);
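// A row came back, so documents holding ingested data still remain; skip this job for now.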
if (confirmSet.getRowCount() > 0)
continue;
ManifoldCF.noteConfigurationChange();
// Remove documents from job queue
jobQueue.deleteAllJobRecords(jobID);
// Remove carrydowns for the job
carryDown.deleteOwner(jobID);
// Nothing is in a critical section - so this should be OK.
hopCount.deleteOwner(jobID);
jobs.delete(jobID);
if (Logging.jobs.isDebugEnabled())
{
Logging.jobs.debug("Removed job "+jobID);
}
}
database.performCommit();
return;
}
catch (ManifoldCFException e)
{
database.signalRollback();
if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT)
{
if (Logging.perf.isDebugEnabled())
Logging.perf.debug("Aborted deleting jobs ready for delete: "+e.getMessage());
sleepAmt = getRandomAmount();
continue;
}
throw e;
}
catch (Error e)
{
database.signalRollback();
throw e;
}
finally
{
database.endTransaction();
sleepFor(sleepAmt);
}
}
}
/** Put all eligible jobs in the "shutting down" state.
*/
public void finishJobs()
throws ManifoldCFException
{
while (true)
{
long sleepAmt = 0L;
// The jobs we should transition:
// - are active
// - have no ACTIVE, ACTIVENEEDRESCAN, PENDING, ACTIVEPURGATORY, ACTIVENEEDRESCANPURGATORY, or PENDINGPURGATORY records
database.beginTransaction();
try
{
// The query this code used to emit was:
// SELECT jobid FROM jobs t0 WHERE t0.status='A' AND NOT EXISTS(SELECT 'x' FROM jobqueue t1 WHERE
// t0.id=t1.jobid AND t1.status IN ('A','P','F','G'))
// This did not get along well with Postgresql, so instead this is what is now done:
// (1) The query should be broken up, such that n queries are done:
// (a) the first one should get all candidate jobs (those that have the right state)
// (b) there should be a query for each job of roughly this form: SELECT id FROM jobqueue WHERE jobid=xxx AND status IN (...) LIMIT 1
// This will work much better than what postgresql currently does, because neither the cost-based analysis nor the actual NOT clause seems to allow
// early exit!
// Do the first query, getting the candidate jobs to be considered
StringBuilder sb = new StringBuilder("SELECT ");
ArrayList list = new ArrayList();
sb.append(jobs.idField).append(" FROM ").append(jobs.getTableName()).append(" WHERE ")
.append(database.buildConjunctionClause(list,new ClauseDescription[]{
new MultiClause(jobs.statusField,new Object[]{
jobs.statusToString(jobs.STATUS_ACTIVE),
jobs.statusToString(jobs.STATUS_ACTIVEWAIT),
jobs.statusToString(jobs.STATUS_ACTIVE_UNINSTALLED),
jobs.statusToString(jobs.STATUS_ACTIVE_NOOUTPUT),
jobs.statusToString(jobs.STATUS_ACTIVE_NEITHER)})}))
.append(" FOR UPDATE");
IResultSet set = database.performQuery(sb.toString(),list,null,null);
int i = 0;
while (i < set.getRowCount())
{
IResultRow row = set.getRow(i++);
Long jobID = (Long)row.getValue(jobs.idField);
// Check to be sure the job is a candidate for shutdown
sb = new StringBuilder("SELECT ");
list.clear();
sb.append(jobQueue.idField).append(" FROM ").append(jobQueue.getTableName()).append(" WHERE ")
.append(database.buildConjunctionClause(list,new ClauseDescription[]{
new UnitaryClause(jobQueue.jobIDField,jobID),
new MultiClause(jobQueue.statusField,new Object[]{
jobQueue.statusToString(jobQueue.STATUS_ACTIVE),
jobQueue.statusToString(jobQueue.STATUS_ACTIVENEEDRESCAN),
jobQueue.statusToString(jobQueue.STATUS_PENDING),
jobQueue.statusToString(jobQueue.STATUS_ACTIVEPURGATORY),
jobQueue.statusToString(jobQueue.STATUS_ACTIVENEEDRESCANPURGATORY),
jobQueue.statusToString(jobQueue.STATUS_PENDINGPURGATORY)})}))
.append(" ").append(database.constructOffsetLimitClause(0,1));
IResultSet confirmSet = database.performQuery(sb.toString(),list,null,null,1,null);
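// Outstanding documents remain, so this job is not yet eligible for shutdown; skip it.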
if (confirmSet.getRowCount() > 0)
continue;
// Mark status of job as "finishing"
jobs.writeStatus(jobID,jobs.STATUS_SHUTTINGDOWN);
if (Logging.jobs.isDebugEnabled())
{
Logging.jobs.debug("Marked job "+jobID+" for shutdown");
}
}
database.performCommit();
return;
}
catch (ManifoldCFException e)
{
database.signalRollback();
if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT)
{
if (Logging.perf.isDebugEnabled())
Logging.perf.debug("Aborted finishing jobs: "+e.getMessage());
sleepAmt = getRandomAmount();
continue;
}
throw e;
}
catch (Error e)
{
database.signalRollback();
throw e;
}
finally
{
database.endTransaction();
sleepFor(sleepAmt);
}
}
}
/** Find the list of jobs that need to have their connectors notified of job completion.
*@return the IDs of jobs that need their output connectors notified in order to become inactive.
*/
public JobNotifyRecord[] getJobsReadyForInactivity()
throws ManifoldCFException
{
while (true)
{
long sleepAmt = 0L;
database.beginTransaction();
try
{
// Do the query
StringBuilder sb = new StringBuilder("SELECT ");
ArrayList list = new ArrayList();
sb.append(jobs.idField).append(" FROM ").append(jobs.getTableName()).append(" WHERE ")
.append(database.buildConjunctionClause(list,new ClauseDescription[]{
new UnitaryClause(jobs.statusField,jobs.statusToString(jobs.STATUS_READYFORNOTIFY))}))
.append(" FOR UPDATE");
IResultSet set = database.performQuery(sb.toString(),list,null,null);
// Return them all
JobNotifyRecord[] rval = new JobNotifyRecord[set.getRowCount()];
int i = 0;
while (i < rval.length)
{
IResultRow row = set.getRow(i);
Long jobID = (Long)row.getValue(jobs.idField);
// Mark status of job as "starting delete"
jobs.writeStatus(jobID,jobs.STATUS_NOTIFYINGOFCOMPLETION);
if (Logging.jobs.isDebugEnabled())
{
Logging.jobs.debug("Found job "+jobID+" in need of notification");
}
rval[i++] = new JobNotifyRecord(jobID);
}
database.performCommit();
return rval;
}
catch (ManifoldCFException e)
{
database.signalRollback();
if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT)
{
if (Logging.perf.isDebugEnabled())
Logging.perf.debug("Aborted getting jobs ready for notify: "+e.getMessage());
sleepAmt = getRandomAmount();
continue;
}
throw e;
}
catch (Error e)
{
database.signalRollback();
throw e;
}
finally
{
database.endTransaction();
sleepFor(sleepAmt);
}
}
}
/** Complete the sequence that resumes jobs, either from a pause or from a scheduling window
* wait. The logic will restore the job to an active state (many possibilities depending on
* connector status), and will record the jobs that have been so modified.
*@param timestamp is the current time in milliseconds since epoch.
*@param modifiedJobs is filled in with the set of IJobDescription objects that were resumed.
*/
public void finishJobResumes(long timestamp, ArrayList modifiedJobs)
throws ManifoldCFException
{
// Do the first query, getting the candidate jobs to be considered
StringBuilder sb = new StringBuilder("SELECT ");
ArrayList list = new ArrayList();
sb.append(jobs.idField)
.append(" FROM ").append(jobs.getTableName()).append(" WHERE ")
.append(database.buildConjunctionClause(list,new ClauseDescription[]{
new MultiClause(jobs.statusField,new Object[]{
jobs.statusToString(jobs.STATUS_RESUMING),
jobs.statusToString(jobs.STATUS_RESUMINGSEEDING)
})}));
IResultSet set = database.performQuery(sb.toString(),list,null,null);
int i = 0;
while (i < set.getRowCount())
{
IResultRow row = set.getRow(i++);
Long jobID = (Long)row.getValue(jobs.idField);
// There are no secondary checks that need to be made; just resume
IJobDescription jobDesc = jobs.load(jobID,true);
modifiedJobs.add(jobDesc);
jobs.finishResumeJob(jobID,timestamp);
if (Logging.jobs.isDebugEnabled())
{
Logging.jobs.debug("Resumed job "+jobID);
}
}
}
/** Complete the sequence that stops jobs, either for abort, pause, or because of a scheduling
* window. The logic will move the job to its next state (INACTIVE, PAUSED, ACTIVEWAIT),
* and will record the jobs that have been so modified.
*@param timestamp is the current time in milliseconds since epoch.
*@param modifiedJobs is filled in with the set of IJobDescription objects that were stopped.
*/
public void finishJobStops(long timestamp, ArrayList modifiedJobs)
throws ManifoldCFException
{
// The query this code used to emit was:
// SELECT jobid FROM jobs t0 WHERE t0.status='X' AND NOT EXISTS(SELECT 'x' FROM jobqueue t1 WHERE
// t0.id=t1.jobid AND t1.status IN ('A','F'))
// Now the query is broken up so that Postgresql behaves more efficiently.
// Do the first query, getting the candidate jobs to be considered
StringBuilder sb = new StringBuilder("SELECT ");
ArrayList list = new ArrayList();
sb.append(jobs.idField)
.append(" FROM ").append(jobs.getTableName()).append(" WHERE ")
.append(database.buildConjunctionClause(list,new ClauseDescription[]{
new MultiClause(jobs.statusField,new Object[]{
jobs.statusToString(jobs.STATUS_ABORTING),
jobs.statusToString(jobs.STATUS_ABORTINGFORRESTART),
jobs.statusToString(jobs.STATUS_ABORTINGFORRESTARTMINIMAL),
jobs.statusToString(jobs.STATUS_PAUSING),
jobs.statusToString(jobs.STATUS_PAUSINGSEEDING),
jobs.statusToString(jobs.STATUS_ACTIVEWAITING),
jobs.statusToString(jobs.STATUS_ACTIVEWAITINGSEEDING),
jobs.statusToString(jobs.STATUS_PAUSINGWAITING),
jobs.statusToString(jobs.STATUS_PAUSINGWAITINGSEEDING)
})}));
IResultSet set = database.performQuery(sb.toString(),list,null,null);
int i = 0;
while (i < set.getRowCount())
{
IResultRow row = set.getRow(i++);
Long jobID = (Long)row.getValue(jobs.idField);
sb = new StringBuilder("SELECT ");
list.clear();
sb.append(jobQueue.idField).append(" FROM ").append(jobQueue.getTableName()).append(" WHERE ")
.append(database.buildConjunctionClause(list,new ClauseDescription[]{
new UnitaryClause(jobQueue.jobIDField,jobID),
new MultiClause(jobQueue.statusField,new Object[]{
jobQueue.statusToString(jobQueue.STATUS_ACTIVE),
jobQueue.statusToString(jobQueue.STATUS_ACTIVEPURGATORY),
jobQueue.statusToString(jobQueue.STATUS_ACTIVENEEDRESCAN),
jobQueue.statusToString(jobQueue.STATUS_ACTIVENEEDRESCANPURGATORY)})}))
.append(" ").append(database.constructOffsetLimitClause(0,1));
IResultSet confirmSet = database.performQuery(sb.toString(),list,null,null,1,null);
if (confirmSet.getRowCount() > 0)
continue;
// All the job's documents need to have their docpriority set to null, to clear dead wood out of the docpriority index.
// See CONNECTORS-290.
// We do this BEFORE updating the job state.
jobQueue.clearDocPriorities(jobID);
IJobDescription jobDesc = jobs.load(jobID,true);
modifiedJobs.add(jobDesc);
jobs.finishStopJob(jobID,timestamp);
if (Logging.jobs.isDebugEnabled())
{
Logging.jobs.debug("Stopped job "+jobID);
}
}
}
/** Reset eligible jobs either back to the "inactive" state, or make them active again. The
* latter will occur if the cleanup phase of the job generated more pending documents.
*
* This method is used to pick up all jobs in the shutting down state
* whose purgatory or being-cleaned records have been all processed.
*
*@param currentTime is the current time in milliseconds since epoch.
*@param resetJobs is filled in with the set of IJobDescription objects that were reset.
*/
public void resetJobs(long currentTime, ArrayList resetJobs)
throws ManifoldCFException
{
// Query for all jobs that fulfill the criteria
// The query used to look like:
//
// SELECT id FROM jobs t0 WHERE status='D' AND NOT EXISTS(SELECT 'x' FROM jobqueue t1 WHERE
// t0.id=t1.jobid AND t1.status='P')
//
// Now, the query is broken up, for performance
// Do the first query, getting the candidate jobs to be considered
StringBuilder sb = new StringBuilder("SELECT ");
ArrayList list = new ArrayList();
sb.append(jobs.idField).append(" FROM ").append(jobs.getTableName()).append(" WHERE ")
.append(database.buildConjunctionClause(list,new ClauseDescription[]{
new UnitaryClause(jobs.statusField,jobs.statusToString(jobs.STATUS_SHUTTINGDOWN))}));
IResultSet set = database.performQuery(sb.toString(),list,null,null);
int i = 0;
while (i < set.getRowCount())
{
IResultRow row = set.getRow(i++);
Long jobID = (Long)row.getValue(jobs.idField);
// Check to be sure the job is a candidate for shutdown
sb = new StringBuilder("SELECT ");
list.clear();
sb.append(jobQueue.idField).append(" FROM ").append(jobQueue.getTableName()).append(" WHERE ")
.append(database.buildConjunctionClause(list,new ClauseDescription[]{
new UnitaryClause(jobQueue.jobIDField,jobID),
new MultiClause(jobQueue.statusField,new Object[]{
jobQueue.statusToString(jobQueue.STATUS_PURGATORY),
jobQueue.statusToString(jobQueue.STATUS_BEINGCLEANED)})}))
.append(" ").append(database.constructOffsetLimitClause(0,1));
IResultSet confirmSet = database.performQuery(sb.toString(),list,null,null,1,null);
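// Purgatory or being-cleaned records remain, so cleanup for this job is still in progress; skip it.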
if (confirmSet.getRowCount() > 0)
continue;
// The shutting-down phase is complete. However, we need to check if there are any outstanding
// PENDING or PENDINGPURGATORY records before we can decide what to do.
sb = new StringBuilder("SELECT ");
list.clear();
sb.append(jobQueue.idField).append(" FROM ").append(jobQueue.getTableName()).append(" WHERE ")
.append(database.buildConjunctionClause(list,new ClauseDescription[]{
new UnitaryClause(jobQueue.jobIDField,jobID),
new MultiClause(jobQueue.statusField,new Object[]{
jobQueue.statusToString(jobQueue.STATUS_PENDING),
jobQueue.statusToString(jobQueue.STATUS_PENDINGPURGATORY)})}))
.append(" ").append(database.constructOffsetLimitClause(0,1));
confirmSet = database.performQuery(sb.toString(),list,null,null,1,null);
if (confirmSet.getRowCount() > 0)
{
// This job needs to re-enter the active state. Make that happen.
jobs.returnJobToActive(jobID);
if (Logging.jobs.isDebugEnabled())
{
Logging.jobs.debug("Job "+jobID+" is re-entering active state");
}
}
else
{
// This job should be marked as finished.
IJobDescription jobDesc = jobs.load(jobID,true);
resetJobs.add(jobDesc);
jobs.finishJob(jobID,currentTime);
if (Logging.jobs.isDebugEnabled())
{
Logging.jobs.debug("Job "+jobID+" now completed");
}
}
}
}
// Status reports
/** Get the status of a job.
*@return the status object for the specified job.
*/
@Override
public JobStatus getStatus(Long jobID)
throws ManifoldCFException
{
return getStatus(jobID,true);
}
/** Get a list of all jobs, and their status information.
*@return an ordered array of job status objects.
*/
@Override
public JobStatus[] getAllStatus()
throws ManifoldCFException
{
return getAllStatus(true);
}
/** Get a list of running jobs. This is for status reporting.
*@return an array of the job status objects.
*/
@Override
public JobStatus[] getRunningJobs()
throws ManifoldCFException
{
return getRunningJobs(true);
}
/** Get a list of completed jobs, and their statistics.
*@return an array of the job status objects.
*/
@Override
public JobStatus[] getFinishedJobs()
throws ManifoldCFException
{
return getFinishedJobs(true);
}
/** Get the status of a job.
*@param jobID is the job ID.
*@param includeCounts is true if document counts should be included.
*@return the status object for the specified job.
*/
public JobStatus getStatus(Long jobID, boolean includeCounts)
throws ManifoldCFException
{
return getStatus(jobID, includeCounts, Integer.MAX_VALUE);
}
/** Get a list of all jobs, and their status information.
*@param includeCounts is true if document counts should be included.
*@return an ordered array of job status objects.
*/
public JobStatus[] getAllStatus(boolean includeCounts)
throws ManifoldCFException
{
return getAllStatus(includeCounts, Integer.MAX_VALUE);
}
/** Get a list of running jobs. This is for status reporting.
*@param includeCounts is true if document counts should be included.
*@return an array of the job status objects.
*/
public JobStatus[] getRunningJobs(boolean includeCounts)
throws ManifoldCFException
{
return getRunningJobs(includeCounts, Integer.MAX_VALUE);
}
/** Get a list of completed jobs, and their statistics.
*@param includeCounts is true if document counts should be included.
*@return an array of the job status objects.
*/
public JobStatus[] getFinishedJobs(boolean includeCounts)
throws ManifoldCFException
{
return getFinishedJobs(includeCounts, Integer.MAX_VALUE);
}
/** Get the status of a job.
*@param jobID is the job ID.
*@param includeCounts is true if document counts should be included.
*@param maxCount is the maximum number of documents we want to count for each status.
*@return the status object for the specified job.
*/
@Override
public JobStatus getStatus(Long jobID, boolean includeCounts, int maxCount)
throws ManifoldCFException
{
ArrayList list = new ArrayList();
String whereClause = Jobs.idField+"=?";
list.add(jobID);
JobStatus[] records = makeJobStatus(whereClause,list,includeCounts,maxCount);
if (records.length == 0)
return null;
return records[0];
}
/** Get a list of all jobs, and their status information.
*@param includeCounts is true if document counts should be included.
*@param maxCount is the maximum number of documents we want to count for each status.
*@return an ordered array of job status objects.
*/
public JobStatus[] getAllStatus(boolean includeCounts, int maxCount)
throws ManifoldCFException
{
return makeJobStatus(null,null,includeCounts,maxCount);
}
/** Get a list of running jobs. This is for status reporting.
*@param includeCounts is true if document counts should be included.
*@param maxCount is the maximum number of documents we want to count for each status.
*@return an array of the job status objects.
*/
@Override
public JobStatus[] getRunningJobs(boolean includeCounts, int maxCount)
throws ManifoldCFException
{
ArrayList whereParams = new ArrayList();
String whereClause = database.buildConjunctionClause(whereParams,new ClauseDescription[]{
new MultiClause(Jobs.statusField,new Object[]{
Jobs.statusToString(Jobs.STATUS_ACTIVE),
Jobs.statusToString(Jobs.STATUS_ACTIVESEEDING),
Jobs.statusToString(Jobs.STATUS_ACTIVE_UNINSTALLED),
Jobs.statusToString(Jobs.STATUS_ACTIVESEEDING_UNINSTALLED),
Jobs.statusToString(Jobs.STATUS_ACTIVE_NOOUTPUT),
Jobs.statusToString(Jobs.STATUS_ACTIVESEEDING_NOOUTPUT),
Jobs.statusToString(Jobs.STATUS_ACTIVE_NEITHER),
Jobs.statusToString(Jobs.STATUS_ACTIVESEEDING_NEITHER),
Jobs.statusToString(Jobs.STATUS_PAUSED),
Jobs.statusToString(Jobs.STATUS_PAUSEDSEEDING),
Jobs.statusToString(Jobs.STATUS_ACTIVEWAIT),
Jobs.statusToString(Jobs.STATUS_ACTIVEWAITSEEDING),
Jobs.statusToString(Jobs.STATUS_PAUSEDWAIT),
Jobs.statusToString(Jobs.STATUS_PAUSEDWAITSEEDING),
Jobs.statusToString(Jobs.STATUS_PAUSING),
Jobs.statusToString(Jobs.STATUS_PAUSINGSEEDING),
Jobs.statusToString(Jobs.STATUS_ACTIVEWAITING),
Jobs.statusToString(Jobs.STATUS_ACTIVEWAITINGSEEDING),
Jobs.statusToString(Jobs.STATUS_PAUSINGWAITING),
Jobs.statusToString(Jobs.STATUS_PAUSINGWAITINGSEEDING),
Jobs.statusToString(Jobs.STATUS_RESUMING),
Jobs.statusToString(Jobs.STATUS_RESUMINGSEEDING)
})});
return makeJobStatus(whereClause,whereParams,includeCounts,maxCount);
}
/** Get a list of completed jobs, and their statistics.
*@param includeCounts is true if document counts should be included.
*@param maxCount is the maximum number of documents we want to count for each status.
*@return an array of the job status objects.
*/
@Override
public JobStatus[] getFinishedJobs(boolean includeCounts, int maxCount)
throws ManifoldCFException
{
StringBuilder sb = new StringBuilder();
ArrayList whereParams = new ArrayList();
sb.append(database.buildConjunctionClause(whereParams,new ClauseDescription[]{
new UnitaryClause(Jobs.statusField,Jobs.statusToString(Jobs.STATUS_INACTIVE))})).append(" AND ")
.append(Jobs.endTimeField).append(" IS NOT NULL");
return makeJobStatus(sb.toString(),whereParams,includeCounts,maxCount);
}
// Protected methods and classes
/** Make a job status array from a query result.
*@param whereClause is the where clause for the jobs we are interested in, or null for all jobs.
*@param whereParams are the parameters belonging to the where clause, or null.
*@param includeCounts is true if document counts should be included.
*@param maxCount is the maximum number of documents we want to count for each status.
*@return the status array.
*/
protected JobStatus[] makeJobStatus(String whereClause, ArrayList whereParams, boolean includeCounts, int maxCount)
throws ManifoldCFException
{
IResultSet set = database.performQuery("SELECT t0."+
Jobs.idField+",t0."+
Jobs.descriptionField+",t0."+
Jobs.statusField+",t0."+
Jobs.startTimeField+",t0."+
Jobs.endTimeField+",t0."+
Jobs.errorField+
" FROM "+jobs.getTableName()+" t0 "+((whereClause==null)?"":(" WHERE "+whereClause))+" ORDER BY "+Jobs.descriptionField+" ASC",
whereParams,null,null);
// Build hashes for the per-job counts: set2 = total documents, set3 = outstanding documents, set4 = processed documents
Map<Long,Long> set2Hash = new HashMap<Long,Long>();
Map<Long,Long> set3Hash = new HashMap<Long,Long>();
Map<Long,Long> set4Hash = new HashMap<Long,Long>();
Map<Long,Boolean> set2Exact = new HashMap<Long,Boolean>();
Map<Long,Boolean> set3Exact = new HashMap<Long,Boolean>();
Map<Long,Boolean> set4Exact = new HashMap<Long,Boolean>();
if (includeCounts)
{
// If we are counting all of them anyway, do this via GROUP BY, since it will be the fastest. But
// otherwise, fire off individual, limited queries instead.
if (maxCount == Integer.MAX_VALUE)
{
buildCountsUsingGroupBy(whereClause,whereParams,set2Hash,set3Hash,set4Hash,set2Exact,set3Exact,set4Exact);
}
else
{
// Check if the total number of matching jobqueue rows exceeds the limit. If not, we can still use the cheaper query.
StringBuilder sb = new StringBuilder("SELECT ");
ArrayList list = new ArrayList();
sb.append(database.constructCountClause(JobQueue.docHashField)).append(" AS doccount")
.append(" FROM ").append(jobQueue.getTableName()).append(" t1");
addWhereClause(sb,list,whereClause,whereParams,false);
sb.append(" ").append(database.constructOffsetLimitClause(0,maxCount+1,false));
IResultSet countResult = database.performQuery(sb.toString(),list,null,null);
if (countResult.getRowCount() > 0 && ((Long)countResult.getRow(0).getValue("doccount")).longValue() > maxCount)
{
// Too many items in queue; do it the hard way
buildCountsUsingIndividualQueries(whereClause,whereParams,maxCount,set2Hash,set3Hash,set4Hash,set2Exact,set3Exact,set4Exact);
}
else
{
// Cheap way should still work.
buildCountsUsingGroupBy(whereClause,whereParams,set2Hash,set3Hash,set4Hash,set2Exact,set3Exact,set4Exact);
}
}
}
JobStatus[] rval = new JobStatus[set.getRowCount()];
for (int i = 0; i < rval.length; i++)
{
IResultRow row = set.getRow(i);
Long jobID = (Long)row.getValue(Jobs.idField);
String description = row.getValue(Jobs.descriptionField).toString();
int status = Jobs.stringToStatus(row.getValue(Jobs.statusField).toString());
Long startTimeValue = (Long)row.getValue(Jobs.startTimeField);
long startTime = -1;
if (startTimeValue != null)
startTime = startTimeValue.longValue();
Long endTimeValue = (Long)row.getValue(Jobs.endTimeField);
long endTime = -1;
if (endTimeValue != null)
endTime = endTimeValue.longValue();
String errorText = (String)row.getValue(Jobs.errorField);
if (errorText != null && errorText.length() == 0)
errorText = null;
int rstatus = JobStatus.JOBSTATUS_NOTYETRUN;
switch (status)
{
case Jobs.STATUS_INACTIVE:
if (errorText != null)
rstatus = JobStatus.JOBSTATUS_ERROR;
else
{
if (startTime >= 0)
rstatus = JobStatus.JOBSTATUS_COMPLETED;
else
rstatus = JobStatus.JOBSTATUS_NOTYETRUN;
}
break;
case Jobs.STATUS_ACTIVE_UNINSTALLED:
case Jobs.STATUS_ACTIVESEEDING_UNINSTALLED:
case Jobs.STATUS_ACTIVE_NOOUTPUT:
case Jobs.STATUS_ACTIVESEEDING_NOOUTPUT:
case Jobs.STATUS_ACTIVE_NEITHER:
case Jobs.STATUS_ACTIVESEEDING_NEITHER:
rstatus = JobStatus.JOBSTATUS_RUNNING_UNINSTALLED;
break;
case Jobs.STATUS_ACTIVE:
case Jobs.STATUS_ACTIVESEEDING:
rstatus = JobStatus.JOBSTATUS_RUNNING;
break;
case Jobs.STATUS_SHUTTINGDOWN:
rstatus = JobStatus.JOBSTATUS_JOBENDCLEANUP;
break;
case Jobs.STATUS_READYFORNOTIFY:
case Jobs.STATUS_NOTIFYINGOFCOMPLETION:
rstatus = JobStatus.JOBSTATUS_JOBENDNOTIFICATION;
break;
case Jobs.STATUS_ABORTING:
case Jobs.STATUS_ABORTINGSEEDING:
case Jobs.STATUS_ABORTINGSTARTINGUP:
case Jobs.STATUS_ABORTINGSTARTINGUPMINIMAL:
rstatus = JobStatus.JOBSTATUS_ABORTING;
break;
case Jobs.STATUS_ABORTINGFORRESTART:
case Jobs.STATUS_ABORTINGFORRESTARTMINIMAL:
case Jobs.STATUS_ABORTINGFORRESTARTSEEDING:
case Jobs.STATUS_ABORTINGFORRESTARTSEEDINGMINIMAL:
case Jobs.STATUS_ABORTINGSTARTINGUPFORRESTART:
case Jobs.STATUS_ABORTINGSTARTINGUPFORRESTARTMINIMAL:
rstatus = JobStatus.JOBSTATUS_RESTARTING;
break;
case Jobs.STATUS_PAUSING:
case Jobs.STATUS_PAUSINGSEEDING:
case Jobs.STATUS_ACTIVEWAITING:
case Jobs.STATUS_ACTIVEWAITINGSEEDING:
case Jobs.STATUS_PAUSINGWAITING:
case Jobs.STATUS_PAUSINGWAITINGSEEDING:
rstatus = JobStatus.JOBSTATUS_STOPPING;
break;
case Jobs.STATUS_RESUMING:
case Jobs.STATUS_RESUMINGSEEDING:
rstatus = JobStatus.JOBSTATUS_RESUMING;
break;
case Jobs.STATUS_PAUSED:
case Jobs.STATUS_PAUSEDSEEDING:
rstatus = JobStatus.JOBSTATUS_PAUSED;
break;
case Jobs.STATUS_ACTIVEWAIT:
case Jobs.STATUS_ACTIVEWAITSEEDING:
rstatus = JobStatus.JOBSTATUS_WINDOWWAIT;
break;
case Jobs.STATUS_PAUSEDWAIT:
case Jobs.STATUS_PAUSEDWAITSEEDING:
rstatus = JobStatus.JOBSTATUS_PAUSED;
break;
case Jobs.STATUS_STARTINGUP:
case Jobs.STATUS_STARTINGUPMINIMAL:
case Jobs.STATUS_READYFORSTARTUP:
case Jobs.STATUS_READYFORSTARTUPMINIMAL:
rstatus = JobStatus.JOBSTATUS_STARTING;
break;
case Jobs.STATUS_DELETESTARTINGUP:
case Jobs.STATUS_READYFORDELETE:
case Jobs.STATUS_DELETING:
case Jobs.STATUS_DELETING_NOOUTPUT:
rstatus = JobStatus.JOBSTATUS_DESTRUCTING;
break;
default:
break;
}
Long set2Value = set2Hash.get(jobID);
Long set3Value = set3Hash.get(jobID);
Long set4Value = set4Hash.get(jobID);
Boolean set2ExactValue = set2Exact.get(jobID);
Boolean set3ExactValue = set3Exact.get(jobID);
Boolean set4ExactValue = set4Exact.get(jobID);
rval[i] = new JobStatus(jobID.toString(),description,rstatus,((set2Value==null)?0L:set2Value.longValue()),
((set3Value==null)?0L:set3Value.longValue()),
((set4Value==null)?0L:set4Value.longValue()),
((set2ExactValue==null)?true:set2ExactValue.booleanValue()),
((set3ExactValue==null)?true:set3ExactValue.booleanValue()),
((set4ExactValue==null)?true:set4ExactValue.booleanValue()),
startTime,endTime,errorText);
}
return rval;
}
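/** Build a clause matching all job queue records that are still outstanding (active or pending, in any variant).
*/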
protected static ClauseDescription buildOutstandingClause()
throws ManifoldCFException
{
return new MultiClause(JobQueue.statusField,new Object[]{
JobQueue.statusToString(JobQueue.STATUS_ACTIVE),
JobQueue.statusToString(JobQueue.STATUS_ACTIVENEEDRESCAN),
JobQueue.statusToString(JobQueue.STATUS_PENDING),
JobQueue.statusToString(JobQueue.STATUS_ACTIVEPURGATORY),
JobQueue.statusToString(JobQueue.STATUS_ACTIVENEEDRESCANPURGATORY),
JobQueue.statusToString(JobQueue.STATUS_PENDINGPURGATORY)});
}
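/** Build a clause matching all job queue records for documents that have been processed at least once.
*/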
protected static ClauseDescription buildProcessedClause()
throws ManifoldCFException
{
return new MultiClause(JobQueue.statusField,new Object[]{
JobQueue.statusToString(JobQueue.STATUS_COMPLETE),
JobQueue.statusToString(JobQueue.STATUS_UNCHANGED),
JobQueue.statusToString(JobQueue.STATUS_PURGATORY),
JobQueue.statusToString(JobQueue.STATUS_ACTIVEPURGATORY),
JobQueue.statusToString(JobQueue.STATUS_ACTIVENEEDRESCANPURGATORY),
JobQueue.statusToString(JobQueue.STATUS_PENDINGPURGATORY)});
}
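/** Compute per-job document counts by firing a separate, limited query for each job and each count.
* Counts are capped at maxCount; the corresponding 'exact' flag records whether the count is exact or capped.
*/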
protected void buildCountsUsingIndividualQueries(String whereClause, ArrayList whereParams, int maxCount,
Map<Long,Long> set2Hash, Map<Long,Long> set3Hash, Map<Long,Long> set4Hash,
Map<Long,Boolean> set2Exact, Map<Long,Boolean> set3Exact, Map<Long,Boolean> set4Exact)
throws ManifoldCFException
{
// Fire off an individual query with a limit for each job
// First, get the list of jobs that we are interested in.
StringBuilder sb = new StringBuilder("SELECT ");
ArrayList list = new ArrayList();
sb.append(Jobs.idField).append(" FROM ").append(jobs.getTableName()).append(" t0");
if (whereClause != null)
{
sb.append(" WHERE ")
.append(whereClause);
if (whereParams != null)
list.addAll(whereParams);
}
IResultSet jobSet = database.performQuery(sb.toString(),list,null,null);
// Scan the set of jobs
for (int i = 0; i < jobSet.getRowCount(); i++)
{
IResultRow row = jobSet.getRow(i);
Long jobID = (Long)row.getValue(Jobs.idField);
// Now, for each job, fire off a separate, limited, query for each count we care about
sb = new StringBuilder("SELECT ");
list.clear();
sb.append(database.constructCountClause(JobQueue.docHashField)).append(" AS doccount")
.append(" FROM ").append(jobQueue.getTableName()).append(" WHERE ");
sb.append(database.buildConjunctionClause(list,new ClauseDescription[]{new UnitaryClause(JobQueue.jobIDField,jobID)}));
sb.append(" ").append(database.constructOffsetLimitClause(0,maxCount+1,false));
IResultSet totalSet = database.performQuery(sb.toString(),list,null,null);
if (totalSet.getRowCount() > 0)
{
long rowCount = ((Long)totalSet.getRow(0).getValue("doccount")).longValue();
if (rowCount > maxCount)
{
set2Hash.put(jobID,new Long(maxCount));
set2Exact.put(jobID,new Boolean(false));
}
else
{
set2Hash.put(jobID,new Long(rowCount));
set2Exact.put(jobID,new Boolean(true));
}
}
sb = new StringBuilder("SELECT ");
list.clear();
sb.append(database.constructCountClause(JobQueue.docHashField)).append(" AS doccount")
.append(" FROM ").append(jobQueue.getTableName()).append(" WHERE ");
sb.append(database.buildConjunctionClause(list,new ClauseDescription[]{new UnitaryClause(JobQueue.jobIDField,jobID)}));
sb.append(" AND ");
sb.append(database.buildConjunctionClause(list,new ClauseDescription[]{buildOutstandingClause()}));
sb.append(" ").append(database.constructOffsetLimitClause(0,maxCount+1,false));
IResultSet outstandingSet = database.performQuery(sb.toString(),list,null,null);
if (outstandingSet.getRowCount() > 0)
{
long rowCount = ((Long)outstandingSet.getRow(0).getValue("doccount")).longValue();
if (rowCount > maxCount)
{
set3Hash.put(jobID,new Long(maxCount));
set3Exact.put(jobID,new Boolean(false));
}
else
{
set3Hash.put(jobID,new Long(rowCount));
set3Exact.put(jobID,new Boolean(true));
}
}
sb = new StringBuilder("SELECT ");
list.clear();
sb.append(database.constructCountClause(JobQueue.docHashField)).append(" AS doccount")
.append(" FROM ").append(jobQueue.getTableName()).append(" WHERE ");
sb.append(database.buildConjunctionClause(list,new ClauseDescription[]{new UnitaryClause(JobQueue.jobIDField,jobID)}));
sb.append(" AND ");
sb.append(database.buildConjunctionClause(list,new ClauseDescription[]{buildProcessedClause()}));
sb.append(" ").append(database.constructOffsetLimitClause(0,maxCount+1,false));
IResultSet processedSet = database.performQuery(sb.toString(),list,null,null);
if (processedSet.getRowCount() > 0)
{
long rowCount = ((Long)processedSet.getRow(0).getValue("doccount")).longValue();
if (rowCount > maxCount)
{
set4Hash.put(jobID,new Long(maxCount));
set4Exact.put(jobID,new Boolean(false));
}
else
{
set4Hash.put(jobID,new Long(rowCount));
set4Exact.put(jobID,new Boolean(true));
}
}
}
}
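/** Compute per-job document counts using three GROUP BY queries (total, outstanding, and processed).
* This is the cheap path; all counts it produces are exact.
*/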
protected void buildCountsUsingGroupBy(String whereClause, ArrayList whereParams,
Map<Long,Long> set2Hash, Map<Long,Long> set3Hash, Map<Long,Long> set4Hash,
Map<Long,Boolean> set2Exact, Map<Long,Boolean> set3Exact, Map<Long,Boolean> set4Exact)
throws ManifoldCFException
{
StringBuilder sb = new StringBuilder("SELECT ");
ArrayList list = new ArrayList();
sb.append(JobQueue.jobIDField).append(",")
.append(database.constructCountClause(JobQueue.docHashField)).append(" AS doccount")
.append(" FROM ").append(jobQueue.getTableName()).append(" t1");
addWhereClause(sb,list,whereClause,whereParams,false);
sb.append(" GROUP BY ").append(JobQueue.jobIDField);
IResultSet set2 = database.performQuery(sb.toString(),list,null,null);
sb = new StringBuilder("SELECT ");
list.clear();
sb.append(JobQueue.jobIDField).append(",")
.append(database.constructCountClause(JobQueue.docHashField)).append(" AS doccount")
.append(" FROM ").append(jobQueue.getTableName()).append(" t1 WHERE ")
.append(database.buildConjunctionClause(list,new ClauseDescription[]{buildOutstandingClause()}));
addWhereClause(sb,list,whereClause,whereParams,true);
sb.append(" GROUP BY ").append(JobQueue.jobIDField);
IResultSet set3 = database.performQuery(sb.toString(),list,null,null);
sb = new StringBuilder("SELECT ");
list.clear();
sb.append(JobQueue.jobIDField).append(",")
.append(database.constructCountClause(JobQueue.docHashField)).append(" AS doccount")
.append(" FROM ").append(jobQueue.getTableName()).append(" t1 WHERE ")
.append(database.buildConjunctionClause(list,new ClauseDescription[]{buildProcessedClause()}));
addWhereClause(sb,list,whereClause,whereParams,true);
sb.append(" GROUP BY ").append(JobQueue.jobIDField);
IResultSet set4 = database.performQuery(sb.toString(),list,null,null);
for (int j = 0; j < set2.getRowCount(); j++)
{
IResultRow row = set2.getRow(j);
Long jobID = (Long)row.getValue(JobQueue.jobIDField);
set2Hash.put(jobID,(Long)row.getValue("doccount"));
set2Exact.put(jobID,new Boolean(true));
}
for (int j = 0; j < set3.getRowCount(); j++)
{
IResultRow row = set3.getRow(j);
Long jobID = (Long)row.getValue(JobQueue.jobIDField);
set3Hash.put(jobID,(Long)row.getValue("doccount"));
set3Exact.put(jobID,new Boolean(true));
}
for (int j = 0; j < set4.getRowCount(); j++)
{
IResultRow row = set4.getRow(j);
Long jobID = (Long)row.getValue(JobQueue.jobIDField);
set4Hash.put(jobID,(Long)row.getValue("doccount"));
set4Exact.put(jobID,new Boolean(true));
}
}
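/** Attach the jobs-table where clause to a job queue query, expressed as an EXISTS subselect correlated on the job id.
*/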
protected void addWhereClause(StringBuilder sb, ArrayList list, String whereClause, ArrayList whereParams, boolean wherePresent)
{
if (whereClause != null)
{
if (wherePresent)
sb.append(" AND");
else
sb.append(" WHERE");
sb.append(" EXISTS(SELECT 'x' FROM ").append(jobs.getTableName()).append(" t0 WHERE ")
.append(database.buildConjunctionClause(list,new ClauseDescription[]{
new JoinClause("t0."+Jobs.idField,"t1."+JobQueue.jobIDField)})).append(" AND ")
.append(whereClause)
.append(")");
if (whereParams != null)
list.addAll(whereParams);
}
}
// These methods generate reports for direct display in the UI.
/** Run a 'document status' report.
*@param connectionName is the name of the connection.
*@param filterCriteria are the criteria used to limit the records considered for the report.
*@param sortOrder is the specified sort order of the final report.
*@param startRow is the first row to include.
*@param rowCount is the number of rows to include.
*@return the results, with the following columns: identifier, job, state, status, scheduled, action, retrycount, retrylimit. The "scheduled" column and the
* "retrylimit" column are long values representing a time; all other values will be user-friendly strings.
*/
public IResultSet genDocumentStatus(String connectionName, StatusFilterCriteria filterCriteria, SortOrder sortOrder,
int startRow, int rowCount)
throws ManifoldCFException
{
// Build the query.
Long currentTime = new Long(System.currentTimeMillis());
StringBuilder sb = new StringBuilder("SELECT ");
ArrayList list = new ArrayList();
sb.append("t0.").append(jobQueue.idField).append(" AS id,")
.append("t0.").append(jobQueue.docIDField).append(" AS identifier,")
.append("t1.").append(jobs.descriptionField).append(" AS job,")
.append("CASE")
.append(" WHEN ").append("t0.").append(jobQueue.statusField).append("=? THEN 'Not yet processed'")
.append(" WHEN ").append("t0.").append(jobQueue.statusField).append("=? THEN 'Not yet processed'")
.append(" WHEN ").append("t0.").append(jobQueue.statusField).append("=? THEN 'Not yet processed'")
.append(" WHEN ").append("t0.").append(jobQueue.statusField).append("=? THEN 'Processed'")
.append(" WHEN ").append("t0.").append(jobQueue.statusField).append("=? THEN 'Processed'")
.append(" WHEN ").append("t0.").append(jobQueue.statusField).append("=? THEN 'Processed'")
.append(" WHEN ").append("t0.").append(jobQueue.statusField).append("=? THEN 'Processed'")
.append(" WHEN ").append("t0.").append(jobQueue.statusField).append("=? THEN 'Processed'")
.append(" WHEN ").append("t0.").append(jobQueue.statusField).append("=? THEN 'Processed'")
.append(" WHEN ").append("t0.").append(jobQueue.statusField).append("=? THEN 'Being removed'")
.append(" WHEN ").append("t0.").append(jobQueue.statusField).append("=? THEN 'Being removed'")
.append(" WHEN ").append("t0.").append(jobQueue.statusField).append("=? THEN 'Being removed'")
.append(" WHEN ").append("t0.").append(jobQueue.statusField).append("=? THEN 'Out of scope'")
.append(" ELSE 'Unknown'")
.append(" END AS state,")
.append("CASE")
.append(" WHEN ")
.append("t0.").append(jobQueue.statusField).append(" IN (?,?,?)")
.append(" THEN 'Inactive'")
.append(" WHEN ")
.append("t0.").append(jobQueue.statusField).append(" IN (?,?)")
.append(" AND ").append("t0.").append(jobQueue.checkActionField).append("=?")
.append(" AND t0.").append(jobQueue.checkTimeField).append("<=").append(currentTime.toString())
.append(" THEN 'Ready for processing'")
.append(" WHEN ")
.append("t0.").append(jobQueue.statusField).append(" IN (?,?)")
.append(" AND ").append("t0.").append(jobQueue.checkActionField).append("=?")
.append(" AND t0.").append(jobQueue.checkTimeField).append("<=").append(currentTime.toString())
.append(" THEN 'Ready for expiration'")
.append(" WHEN ")
.append("t0.").append(jobQueue.statusField).append(" IN (?,?)")
.append(" AND ").append("t0.").append(jobQueue.checkActionField).append("=?")
.append(" AND t0.").append(jobQueue.checkTimeField).append(">").append(currentTime.toString())
.append(" THEN 'Waiting for processing'")
.append(" WHEN ")
.append("t0.").append(jobQueue.statusField).append(" IN (?,?)")
.append(" AND ").append("t0.").append(jobQueue.checkActionField).append("=?")
.append(" AND t0.").append(jobQueue.checkTimeField).append(">").append(currentTime.toString())
.append(" THEN 'Waiting for expiration'")
.append(" WHEN ")
.append("t0.").append(jobQueue.statusField).append(" IN (?,?)")
.append(" AND ").append("t0.").append(jobQueue.checkTimeField).append(" IS NULL")
.append(" THEN 'Waiting forever'")
.append(" WHEN ")
.append("t0.").append(jobQueue.statusField).append("=?")
.append(" THEN 'Hopcount exceeded'")
.append(" WHEN ").append("t0.").append(jobQueue.statusField).append(" IN (?,?,?)")
.append(" THEN 'Deleting'")
.append(" WHEN ")
.append("t0.").append(jobQueue.statusField).append(" IN (?,?,?,?)")
.append(" AND ").append("t0.").append(jobQueue.checkActionField).append("=?")
.append(" THEN 'Processing'")
.append(" WHEN ")
.append("t0.").append(jobQueue.statusField).append(" IN (?,?,?,?)")
.append(" AND ").append("t0.").append(jobQueue.checkActionField).append("=?")
.append(" THEN 'Expiring'")
.append(" ELSE 'Unknown'")
.append(" END AS status,")
.append("t0.").append(jobQueue.checkTimeField).append(" AS scheduled,")
.append("CASE")
.append(" WHEN ").append("t0.").append(jobQueue.checkActionField).append("=? THEN 'Process'")
.append(" WHEN ").append("t0.").append(jobQueue.checkActionField).append("=? THEN 'Expire'")
.append(" ELSE 'Unknown'")
.append(" END AS action,")
.append("t0.").append(jobQueue.failCountField).append(" AS retrycount,")
.append("t0.").append(jobQueue.failTimeField).append(" AS retrylimit")
.append(" FROM ").append(jobQueue.getTableName()).append(" t0,").append(jobs.getTableName()).append(" t1 WHERE ")
.append(database.buildConjunctionClause(list,new ClauseDescription[]{
new JoinClause("t0."+jobQueue.jobIDField,"t1."+jobs.idField)}));
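// Bind the parameters in the same order as the '?' placeholders emitted above.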
list.add(jobQueue.statusToString(jobQueue.STATUS_PENDING));
list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVE));
list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVENEEDRESCAN));
list.add(jobQueue.statusToString(jobQueue.STATUS_PENDINGPURGATORY));
list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVEPURGATORY));
list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVENEEDRESCANPURGATORY));
list.add(jobQueue.statusToString(jobQueue.STATUS_COMPLETE));
list.add(jobQueue.statusToString(jobQueue.STATUS_UNCHANGED));
list.add(jobQueue.statusToString(jobQueue.STATUS_PURGATORY));
list.add(jobQueue.statusToString(jobQueue.STATUS_BEINGDELETED));
list.add(jobQueue.statusToString(jobQueue.STATUS_BEINGCLEANED));
list.add(jobQueue.statusToString(jobQueue.STATUS_ELIGIBLEFORDELETE));
list.add(jobQueue.statusToString(jobQueue.STATUS_HOPCOUNTREMOVED));
list.add(jobQueue.statusToString(jobQueue.STATUS_COMPLETE));
list.add(jobQueue.statusToString(jobQueue.STATUS_UNCHANGED));
list.add(jobQueue.statusToString(jobQueue.STATUS_PURGATORY));
list.add(jobQueue.statusToString(jobQueue.STATUS_PENDING));
list.add(jobQueue.statusToString(jobQueue.STATUS_PENDINGPURGATORY));
list.add(jobQueue.actionToString(jobQueue.ACTION_RESCAN));
list.add(jobQueue.statusToString(jobQueue.STATUS_PENDING));
list.add(jobQueue.statusToString(jobQueue.STATUS_PENDINGPURGATORY));
list.add(jobQueue.actionToString(jobQueue.ACTION_REMOVE));
list.add(jobQueue.statusToString(jobQueue.STATUS_PENDING));
list.add(jobQueue.statusToString(jobQueue.STATUS_PENDINGPURGATORY));
list.add(jobQueue.actionToString(jobQueue.ACTION_RESCAN));
list.add(jobQueue.statusToString(jobQueue.STATUS_PENDING));
list.add(jobQueue.statusToString(jobQueue.STATUS_PENDINGPURGATORY));
list.add(jobQueue.actionToString(jobQueue.ACTION_REMOVE));
list.add(jobQueue.statusToString(jobQueue.STATUS_PENDING));
list.add(jobQueue.statusToString(jobQueue.STATUS_PENDINGPURGATORY));
list.add(jobQueue.statusToString(jobQueue.STATUS_HOPCOUNTREMOVED));
list.add(jobQueue.statusToString(jobQueue.STATUS_BEINGDELETED));
list.add(jobQueue.statusToString(jobQueue.STATUS_BEINGCLEANED));
list.add(jobQueue.statusToString(jobQueue.STATUS_ELIGIBLEFORDELETE));
list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVE));
list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVENEEDRESCAN));
list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVEPURGATORY));
list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVENEEDRESCANPURGATORY));
list.add(jobQueue.actionToString(jobQueue.ACTION_RESCAN));
list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVE));
list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVENEEDRESCAN));
list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVEPURGATORY));
list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVENEEDRESCANPURGATORY));
list.add(jobQueue.actionToString(jobQueue.ACTION_REMOVE));
list.add(jobQueue.actionToString(jobQueue.ACTION_RESCAN));
list.add(jobQueue.actionToString(jobQueue.ACTION_REMOVE));
addCriteria(sb,list,"t0.",connectionName,filterCriteria,true);
// The intrinsic ordering is provided by the "id" column, and nothing else.
addOrdering(sb,new String[]{"id"},sortOrder);
addLimits(sb,startRow,rowCount);
return database.performQuery(sb.toString(),list,null,null,rowCount,null);
}
/** Run a 'queue status' report.
*@param connectionName is the name of the connection.
*@param filterCriteria are the criteria used to limit the records considered for the report.
*@param sortOrder is the specified sort order of the final report.
*@param idBucketDescription is the bucket description for generating the identifier class.
*@param startRow is the first row to include.
*@param rowCount is the number of rows to include.
*@return the results, with the following columns: idbucket, inactive, processing, expiring, deleting,
processready, expireready, processwaiting, expirewaiting, waitingforever, hopcountexceeded
*/
public IResultSet genQueueStatus(String connectionName, StatusFilterCriteria filterCriteria, SortOrder sortOrder,
BucketDescription idBucketDescription, int startRow, int rowCount)
throws ManifoldCFException
{
// SELECT substring(docid FROM '<id_regexp>') AS idbucket,
// SUM(CASE WHEN status='C' then 1 else 0 end) AS inactive, ... FROM jobqueue WHERE <criteria>
// GROUP BY idbucket
Long currentTime = new Long(System.currentTimeMillis());
StringBuilder sb = new StringBuilder("SELECT ");
ArrayList list = new ArrayList();
sb.append("t1.idbucket,SUM(t1.inactive) AS inactive,SUM(t1.processing) AS processing,SUM(t1.expiring) AS expiring,SUM(t1.deleting) AS deleting,")
.append("SUM(t1.processready) AS processready,SUM(t1.expireready) AS expireready,SUM(t1.processwaiting) AS processwaiting,SUM(t1.expirewaiting) AS expirewaiting,")
.append("SUM(t1.waitingforever) AS waitingforever,SUM(t1.hopcountexceeded) AS hopcountexceeded FROM (SELECT ");
addBucketExtract(sb,list,"",jobQueue.docIDField,idBucketDescription);
sb.append(" AS idbucket,")
.append("CASE")
.append(" WHEN ")
.append(jobQueue.statusField).append(" IN (?,?,?)")
.append(" THEN 1 ELSE 0")
.append(" END")
.append(" AS inactive,")
.append("CASE")
.append(" WHEN ")
.append(jobQueue.statusField).append(" IN (?,?,?,?)")
.append(" AND ").append(jobQueue.checkActionField).append("=?")
.append(" THEN 1 ELSE 0")
.append(" END")
.append(" as processing,")
.append("CASE")
.append(" WHEN ")
.append(jobQueue.statusField).append(" IN (?,?,?,?)")
.append(" AND ").append(jobQueue.checkActionField).append("=?")
.append(" THEN 1 ELSE 0")
.append(" END")
.append(" as expiring,")
.append("CASE")
.append(" WHEN ")
.append(jobQueue.statusField).append(" IN (?,?,?)")
.append(" THEN 1 ELSE 0")
.append(" END")
.append(" as deleting,")
.append("CASE")
.append(" WHEN ")
.append(jobQueue.statusField).append(" IN (?,?)")
.append(" AND ").append(jobQueue.checkActionField).append("=?")
.append(" AND ").append(jobQueue.checkTimeField).append("<=").append(currentTime.toString())
.append(" THEN 1 ELSE 0")
.append(" END")
.append(" as processready,")
.append("CASE")
.append(" WHEN ")
.append(jobQueue.statusField).append(" IN (?,?)")
.append(" AND ").append(jobQueue.checkActionField).append("=?")
.append(" AND ").append(jobQueue.checkTimeField).append("<=").append(currentTime.toString())
.append(" THEN 1 ELSE 0")
.append(" END")
.append(" as expireready,")
.append("CASE")
.append(" WHEN ")
.append(jobQueue.statusField).append(" IN (?,?)")
.append(" AND ").append(jobQueue.checkActionField).append("=?")
.append(" AND ").append(jobQueue.checkTimeField).append(">").append(currentTime.toString())
.append(" THEN 1 ELSE 0")
.append(" END")
.append(" as processwaiting,")
.append("CASE")
.append(" WHEN ")
.append(jobQueue.statusField).append(" IN (?,?)")
.append(" AND ").append(jobQueue.checkActionField).append("=?")
.append(" AND ").append(jobQueue.checkTimeField).append(">").append(currentTime.toString())
.append(" THEN 1 ELSE 0")
.append(" END")
.append(" as expirewaiting,")
.append("CASE")
.append(" WHEN ")
.append(jobQueue.statusField).append(" IN (?,?)")
.append(" AND ").append(jobQueue.checkTimeField).append(" IS NULL")
.append(" THEN 1 ELSE 0")
.append(" END")
.append(" as waitingforever,")
.append("CASE")
.append(" WHEN ")
.append(jobQueue.statusField).append("=?")
.append(" THEN 1 ELSE 0")
.append(" END")
.append(" as hopcountexceeded");
sb.append(" FROM ").append(jobQueue.getTableName());
list.add(jobQueue.statusToString(jobQueue.STATUS_COMPLETE));
list.add(jobQueue.statusToString(jobQueue.STATUS_UNCHANGED));
list.add(jobQueue.statusToString(jobQueue.STATUS_PURGATORY));
list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVE));
list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVENEEDRESCAN));
list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVEPURGATORY));
list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVENEEDRESCANPURGATORY));
list.add(jobQueue.actionToString(jobQueue.ACTION_RESCAN));
list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVE));
list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVENEEDRESCAN));
list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVEPURGATORY));
list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVENEEDRESCANPURGATORY));
list.add(jobQueue.actionToString(jobQueue.ACTION_REMOVE));
list.add(jobQueue.statusToString(jobQueue.STATUS_BEINGDELETED));
list.add(jobQueue.statusToString(jobQueue.STATUS_BEINGCLEANED));
list.add(jobQueue.statusToString(jobQueue.STATUS_ELIGIBLEFORDELETE));
list.add(jobQueue.statusToString(jobQueue.STATUS_PENDING));
list.add(jobQueue.statusToString(jobQueue.STATUS_PENDINGPURGATORY));
list.add(jobQueue.actionToString(jobQueue.ACTION_RESCAN));
list.add(jobQueue.statusToString(jobQueue.STATUS_PENDING));
list.add(jobQueue.statusToString(jobQueue.STATUS_PENDINGPURGATORY));
list.add(jobQueue.actionToString(jobQueue.ACTION_REMOVE));
list.add(jobQueue.statusToString(jobQueue.STATUS_PENDING));
list.add(jobQueue.statusToString(jobQueue.STATUS_PENDINGPURGATORY));
list.add(jobQueue.actionToString(jobQueue.ACTION_RESCAN));
list.add(jobQueue.statusToString(jobQueue.STATUS_PENDING));
list.add(jobQueue.statusToString(jobQueue.STATUS_PENDINGPURGATORY));
list.add(jobQueue.actionToString(jobQueue.ACTION_REMOVE));
list.add(jobQueue.statusToString(jobQueue.STATUS_PENDING));
list.add(jobQueue.statusToString(jobQueue.STATUS_PENDINGPURGATORY));
list.add(jobQueue.statusToString(jobQueue.STATUS_HOPCOUNTREMOVED));
addCriteria(sb,list,"",connectionName,filterCriteria,false);
sb.append(") t1 GROUP BY idbucket");
addOrdering(sb,new String[]{"idbucket","inactive","processing","expiring","deleting","processready","expireready","processwaiting","expirewaiting","waitingforever","hopcountexceeded"},sortOrder);
addLimits(sb,startRow,rowCount);
return database.performQuery(sb.toString(),list,null,null,rowCount,null);
}
// Protected methods for report generation
/** Turn a bucket description into a return column.
* This is complicated by the fact that the extraction code is inherently case sensitive. So if a case-insensitive
* match is desired, we convert the whole value to lower case before doing the match.
*/
protected void addBucketExtract(StringBuilder sb, ArrayList list, String columnPrefix, String columnName, BucketDescription bucketDesc)
{
boolean isSensitive = bucketDesc.isSensitive();
list.add(bucketDesc.getRegexp());
sb.append(database.constructSubstringClause(columnPrefix+columnName,"?",!isSensitive));
}
/** Add criteria clauses to query.
*@return true once a WHERE clause has been emitted.
*/
protected boolean addCriteria(StringBuilder sb, ArrayList list, String fieldPrefix, String connectionName, StatusFilterCriteria criteria, boolean whereEmitted)
throws ManifoldCFException
{
Long[] matchingJobs = criteria.getJobs();
if (matchingJobs != null)
{
whereEmitted = emitClauseStart(sb,whereEmitted);
if (matchingJobs.length == 0)
{
sb.append("0>1");
}
else
{
sb.append(database.buildConjunctionClause(list,new ClauseDescription[]{
new MultiClause(fieldPrefix+jobQueue.jobIDField,matchingJobs)}));
}
}
RegExpCriteria identifierRegexp = criteria.getIdentifierMatch();
if (identifierRegexp != null)
{
whereEmitted = emitClauseStart(sb,whereEmitted);
list.add(identifierRegexp.getRegexpString());
sb.append(database.constructRegexpClause(fieldPrefix+jobQueue.docIDField,"?",identifierRegexp.isInsensitive()));
}
Long nowTime = new Long(criteria.getNowTime());
int[] states = criteria.getMatchingStates();
int[] statuses = criteria.getMatchingStatuses();
if (states.length == 0 || statuses.length == 0)
{
whereEmitted = emitClauseStart(sb,whereEmitted);
sb.append("0>1");
return whereEmitted;
}
// Iterate through the specified states, and emit a series of OR clauses, one for each state. The contents of the clause will be complex.
whereEmitted = emitClauseStart(sb,whereEmitted);
sb.append("(");
int k = 0;
while (k < states.length)
{
int stateValue = states[k];
if (k > 0)
sb.append(" OR ");
switch (stateValue)
{
case DOCSTATE_NEVERPROCESSED:
sb.append(database.buildConjunctionClause(list,new ClauseDescription[]{
new MultiClause(fieldPrefix+jobQueue.statusField,new Object[]{
jobQueue.statusToString(jobQueue.STATUS_PENDING),
jobQueue.statusToString(jobQueue.STATUS_ACTIVE),
jobQueue.statusToString(jobQueue.STATUS_ACTIVENEEDRESCAN)})}));
break;
case DOCSTATE_PREVIOUSLYPROCESSED:
sb.append(database.buildConjunctionClause(list,new ClauseDescription[]{
new MultiClause(fieldPrefix+jobQueue.statusField,new Object[]{
jobQueue.statusToString(jobQueue.STATUS_PENDINGPURGATORY),
jobQueue.statusToString(jobQueue.STATUS_ACTIVEPURGATORY),
jobQueue.statusToString(jobQueue.STATUS_ACTIVENEEDRESCANPURGATORY),
jobQueue.statusToString(jobQueue.STATUS_ELIGIBLEFORDELETE),
jobQueue.statusToString(jobQueue.STATUS_BEINGDELETED),
jobQueue.statusToString(jobQueue.STATUS_BEINGCLEANED),
jobQueue.statusToString(jobQueue.STATUS_COMPLETE),
jobQueue.statusToString(jobQueue.STATUS_UNCHANGED),
jobQueue.statusToString(jobQueue.STATUS_PURGATORY)})}));
break;
case DOCSTATE_OUTOFSCOPE:
sb.append(database.buildConjunctionClause(list,new ClauseDescription[]{
new MultiClause(fieldPrefix+jobQueue.statusField,new Object[]{
jobQueue.statusToString(jobQueue.STATUS_HOPCOUNTREMOVED)})}));
break;
}
k++;
}
sb.append(")");
whereEmitted = emitClauseStart(sb,whereEmitted);
sb.append("(");
k = 0;
while (k < statuses.length)
{
int stateValue = statuses[k];
if (k > 0)
sb.append(" OR ");
switch (stateValue)
{
case DOCSTATUS_INACTIVE:
sb.append(database.buildConjunctionClause(list,new ClauseDescription[]{
new MultiClause(fieldPrefix+jobQueue.statusField,new Object[]{
jobQueue.statusToString(jobQueue.STATUS_COMPLETE),
jobQueue.statusToString(jobQueue.STATUS_UNCHANGED),
jobQueue.statusToString(jobQueue.STATUS_PURGATORY)})}));
break;
case DOCSTATUS_PROCESSING:
sb.append(database.buildConjunctionClause(list,new ClauseDescription[]{
new MultiClause(fieldPrefix+jobQueue.statusField,new Object[]{
jobQueue.statusToString(jobQueue.STATUS_ACTIVE),
jobQueue.statusToString(jobQueue.STATUS_ACTIVENEEDRESCAN),
jobQueue.statusToString(jobQueue.STATUS_ACTIVEPURGATORY),
jobQueue.statusToString(jobQueue.STATUS_ACTIVENEEDRESCANPURGATORY)}),
new UnitaryClause(fieldPrefix+jobQueue.checkActionField,jobQueue.actionToString(jobQueue.ACTION_RESCAN))}));
break;
case DOCSTATUS_EXPIRING:
sb.append(database.buildConjunctionClause(list,new ClauseDescription[]{
new MultiClause(fieldPrefix+jobQueue.statusField,new Object[]{
jobQueue.statusToString(jobQueue.STATUS_ACTIVE),
jobQueue.statusToString(jobQueue.STATUS_ACTIVENEEDRESCAN),
jobQueue.statusToString(jobQueue.STATUS_ACTIVEPURGATORY),
jobQueue.statusToString(jobQueue.STATUS_ACTIVENEEDRESCANPURGATORY)}),
new UnitaryClause(fieldPrefix+jobQueue.checkActionField,jobQueue.actionToString(jobQueue.ACTION_REMOVE))}));
break;
case DOCSTATUS_DELETING:
sb.append(database.buildConjunctionClause(list,new ClauseDescription[]{
new MultiClause(fieldPrefix+jobQueue.statusField,new Object[]{
jobQueue.statusToString(jobQueue.STATUS_BEINGDELETED),
jobQueue.statusToString(jobQueue.STATUS_BEINGCLEANED),
jobQueue.statusToString(jobQueue.STATUS_ELIGIBLEFORDELETE)})}));
break;
case DOCSTATUS_READYFORPROCESSING:
sb.append(database.buildConjunctionClause(list,new ClauseDescription[]{
new MultiClause(fieldPrefix+jobQueue.statusField,new Object[]{
jobQueue.statusToString(jobQueue.STATUS_PENDING),
jobQueue.statusToString(jobQueue.STATUS_PENDINGPURGATORY)}),
new UnitaryClause(fieldPrefix+jobQueue.checkActionField,jobQueue.actionToString(jobQueue.ACTION_RESCAN)),
new UnitaryClause(fieldPrefix+jobQueue.checkTimeField,"<=",nowTime)}));
break;
case DOCSTATUS_READYFOREXPIRATION:
sb.append(database.buildConjunctionClause(list,new ClauseDescription[]{
new MultiClause(fieldPrefix+jobQueue.statusField,new Object[]{
jobQueue.statusToString(jobQueue.STATUS_PENDING),
jobQueue.statusToString(jobQueue.STATUS_PENDINGPURGATORY)}),
new UnitaryClause(fieldPrefix+jobQueue.checkActionField,jobQueue.actionToString(jobQueue.ACTION_REMOVE)),
new UnitaryClause(fieldPrefix+jobQueue.checkTimeField,"<=",nowTime)}));
break;
case DOCSTATUS_WAITINGFORPROCESSING:
sb.append(database.buildConjunctionClause(list,new ClauseDescription[]{
new MultiClause(fieldPrefix+jobQueue.statusField,new Object[]{
jobQueue.statusToString(jobQueue.STATUS_PENDING),
jobQueue.statusToString(jobQueue.STATUS_PENDINGPURGATORY)}),
new UnitaryClause(fieldPrefix+jobQueue.checkActionField,jobQueue.actionToString(jobQueue.ACTION_RESCAN)),
new UnitaryClause(fieldPrefix+jobQueue.checkTimeField,">",nowTime)}));
break;
case DOCSTATUS_WAITINGFOREXPIRATION:
sb.append(database.buildConjunctionClause(list,new ClauseDescription[]{
new MultiClause(fieldPrefix+jobQueue.statusField,new Object[]{
jobQueue.statusToString(jobQueue.STATUS_PENDING),
jobQueue.statusToString(jobQueue.STATUS_PENDINGPURGATORY)}),
new UnitaryClause(fieldPrefix+jobQueue.checkActionField,jobQueue.actionToString(jobQueue.ACTION_REMOVE)),
new UnitaryClause(fieldPrefix+jobQueue.checkTimeField,">",nowTime)}));
break;
case DOCSTATUS_WAITINGFOREVER:
sb.append(database.buildConjunctionClause(list,new ClauseDescription[]{
new MultiClause(fieldPrefix+jobQueue.statusField,new Object[]{
jobQueue.statusToString(jobQueue.STATUS_PENDING),
jobQueue.statusToString(jobQueue.STATUS_PENDINGPURGATORY)})}))
.append(" AND ").append(fieldPrefix).append(jobQueue.checkTimeField).append(" IS NULL");
break;
case DOCSTATUS_HOPCOUNTEXCEEDED:
sb.append(database.buildConjunctionClause(list,new ClauseDescription[]{
new MultiClause(fieldPrefix+jobQueue.statusField,new Object[]{
jobQueue.statusToString(jobQueue.STATUS_HOPCOUNTREMOVED)})}));
break;
}
k++;
}
sb.append(")");
return whereEmitted;
}
/** Emit a WHERE or an AND, depending...
*/
protected boolean emitClauseStart(StringBuilder sb, boolean whereEmitted)
{
if (whereEmitted)
sb.append(" AND ");
else
sb.append(" WHERE ");
return true;
}
/** Add ordering.
*/
protected void addOrdering(StringBuilder sb, String[] completeFieldList, SortOrder sort)
{
// Keep track of the fields we've seen
Map hash = new HashMap();
// Emit the "Order by"
sb.append(" ORDER BY ");
// Go through the specified list
int i = 0;
int count = sort.getCount();
while (i < count)
{
if (i > 0)
sb.append(",");
String column = sort.getColumn(i);
sb.append(column);
if (sort.getDirection(i) == sort.SORT_ASCENDING)
sb.append(" ASC");
else
sb.append(" DESC");
hash.put(column,column);
i++;
}
// Now, go through the complete field list, and emit sort criteria for everything
// not actually specified. This is so LIMIT and OFFSET give consistent results.
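// For example (illustrative values), if the caller sorted only on "inactive" ascending, the
// emitted clause becomes: ORDER BY inactive ASC, idbucket DESC, processing DESC, ... with
// every remaining column of completeFieldList appended in DESC order.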
int j = 0;
while (j < completeFieldList.length)
{
String field = completeFieldList[j];
if (hash.get(field) == null)
{
if (i > 0)
sb.append(",");
sb.append(field);
sb.append(" DESC");
//if (j == 0)
// sb.append(" DESC");
//else
// sb.append(" ASC");
i++;
}
j++;
}
}
/** Add limit and offset.
*/
protected void addLimits(StringBuilder sb, int startRow, int maxRowCount)
{
sb.append(" ").append(database.constructOffsetLimitClause(startRow,maxRowCount));
}
/** Class for tracking existing jobqueue row data */
protected static class JobqueueRecord
{
protected Long recordID;
protected int status;
protected Long checkTimeValue;
public JobqueueRecord(Long recordID, int status, Long checkTimeValue)
{
this.recordID = recordID;
this.status = status;
this.checkTimeValue = checkTimeValue;
}
public Long getRecordID()
{
return recordID;
}
public int getStatus()
{
return status;
}
public Long getCheckTimeValue()
{
return checkTimeValue;
}
}
/** We go through 2x the number of documents we should need if we were perfect at setting document priorities. */
private static int EXTRA_FACTOR = 2;
/** This class provides the throttling limits for the job queueing query.
*/
protected static class ThrottleLimit implements ILimitChecker
{
// For each connection, there is (a) a number (which is the maximum per bin), and (b)
// a current running count per bin. These are stored as elements in a hash map.
protected HashMap connectionMap = new HashMap();
// The maximum number of jobs that have reached their chunk size limit that we
// need
protected int n;
// This is the hash table that maps a job ID to the object that tracks the number
// of documents already accumulated for this resultset. The count of the number
// of queue records we have is tallied by going through each job in this table
// and adding the records outstanding for it.
protected HashMap jobQueueHash = new HashMap();
// This is the map from jobid to connection name
protected HashMap jobConnection = new HashMap();
// This is the set of allowed connection names. We discard all documents that are
// not from that set.
protected HashMap activeConnections = new HashMap();
// This is the number of documents per set per connection.
protected HashMap setSizes = new HashMap();
// These are the individual connection maximums, keyed by connection name.
protected HashMap maxConnectionCounts = new HashMap();
// This is the maximum number of documents per set over all the connections we are looking at. This helps us establish a sanity limit.
protected int maxSetSize = 0;
// This is the number of documents processed so far
protected int documentsProcessed = 0;
// This is where we accumulate blocking documents. This is an arraylist of DocumentDescription objects.
protected ArrayList blockingDocumentArray = new ArrayList();
// Cutoff time for documents eligible for prioritization
protected long prioritizationTime;
/** Constructor.
* This class is built up piecemeal after construction, so the constructor does little more than record its arguments.
*@param n is the maximum number of full job descriptions we want at this time.
*@param prioritizationTime is the cutoff time for documents eligible for prioritization.
*/
public ThrottleLimit(int n, long prioritizationTime)
{
this.n = n;
this.prioritizationTime = prioritizationTime;
Logging.perf.debug("Limit instance created");
}
/** Transfer blocking documents discovered to BlockingDocuments object */
public void tallyBlockingDocuments(BlockingDocuments blockingDocuments)
{
int i = 0;
while (i < blockingDocumentArray.size())
{
DocumentDescription dd = (DocumentDescription)blockingDocumentArray.get(i++);
blockingDocuments.addBlockingDocument(dd);
}
blockingDocumentArray.clear();
}
/** Add a job/connection name map entry.
*@param jobID is the job id.
*@param connectionName is the connection name.
*/
public void addJob(Long jobID, String connectionName)
{
jobConnection.put(jobID,connectionName);
}
/** Add an active connection. This is the pool of active connections that will be used for the lifetime of this operation.
*@param connectionName is the connection name.
*/
public void addConnectionName(String connectionName, IRepositoryConnector connectorInstance)
throws ManifoldCFException
{
activeConnections.put(connectionName,connectorInstance);
int setSize = connectorInstance.getMaxDocumentRequest();
setSizes.put(connectionName,new Integer(setSize));
if (setSize > maxSetSize)
maxSetSize = setSize;
}
/** Add a document limit for a specified connection. This is the limit across all matching bins; if any
* individual matching bin exceeds that limit, then documents that belong to that bin will be excluded.
*@param connectionName is the connection name.
*@param regexp is the regular expression, which we will match against various bins.
*@param upperLimit is the maximum count associated with the specified job.
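* For example (hypothetical values), addLimit("myconn","^[^\\.]",8) caps every bin matching
* ^[^\.] at 8 fetches for connection "myconn"; when several expressions match the same bin,
* the minimum limit wins (see ThrottleJobItem.findMaxCount()).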
*/
public void addLimit(String connectionName, String regexp, int upperLimit)
{
if (Logging.perf.isDebugEnabled())
Logging.perf.debug(" Adding fetch limit of "+Integer.toString(upperLimit)+" fetches for expression '"+regexp+"' for connection '"+connectionName+"'");
ThrottleJobItem ji = (ThrottleJobItem)connectionMap.get(connectionName);
if (ji == null)
{
ji = new ThrottleJobItem();
connectionMap.put(connectionName,ji);
}
ji.addLimit(regexp,upperLimit);
}
/** Set a connection-based total document limit.
*/
public void setConnectionLimit(String connectionName, int maxDocuments)
{
if (Logging.perf.isDebugEnabled())
Logging.perf.debug(" Setting connection limit of "+Integer.toString(maxDocuments)+" for connection "+connectionName);
maxConnectionCounts.put(connectionName,new MutableInteger(maxDocuments));
}
/** See if this class can be legitimately compared against another of
* the same type.
*@return true if comparisons will ever return "true".
*/
public boolean doesCompareWork()
{
return false;
}
/** Create a duplicate of this class instance. All current state should be preserved.
* NOTE: Since doesCompareWork() returns false, queries using this limit checker cannot
* be cached, and therefore duplicate() is never called from the query executor. But it can
* be called from other places.
*@return the duplicate.
*/
public ILimitChecker duplicate()
{
return makeDeepCopy();
}
/** Make a deep copy */
public ThrottleLimit makeDeepCopy()
{
ThrottleLimit rval = new ThrottleLimit(n,prioritizationTime);
// Create a true copy of all the structures in which counts are kept. The referential structures (e.g. connection hashes)
// do not need a deep copy.
rval.activeConnections = activeConnections;
rval.setSizes = setSizes;
rval.maxConnectionCounts = maxConnectionCounts;
rval.maxSetSize = maxSetSize;
rval.jobConnection = jobConnection;
// The structures where counts are maintained DO need a deep copy.
rval.documentsProcessed = documentsProcessed;
Iterator iter;
iter = connectionMap.keySet().iterator();
while (iter.hasNext())
{
Object key = iter.next();
rval.connectionMap.put(key,((ThrottleJobItem)connectionMap.get(key)).duplicate());
}
iter = jobQueueHash.keySet().iterator();
while (iter.hasNext())
{
Object key = iter.next();
rval.jobQueueHash.put(key,((QueueHashItem)jobQueueHash.get(key)).duplicate());
}
return rval;
}
/** Find the hashcode for this class. This will only ever be used if
* doesCompareWork() returns true.
*@return the hashcode.
*/
public int hashCode()
{
return 0;
}
/** Compare two objects and see if equal. This will only ever be used
* if doesCompareWork() returns true.
*@param object is the object to compare against.
*@return true if equal.
*/
public boolean equals(Object object)
{
return false;
}
/** Get the remaining documents we should query for.
*@return the maximal remaining count.
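* For example, with n = 10 and maxSetSize = 10, at most EXTRA_FACTOR * 10 * 10 = 200
* documents (minus those already processed) remain to be queried.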
*/
public int getRemainingDocuments()
{
return EXTRA_FACTOR * n * maxSetSize - documentsProcessed;
}
/** See if a result row should be included in the final result set.
*@param row is the result row to check.
*@return true if it should be included, false otherwise.
*/
public boolean checkInclude(IResultRow row)
throws ManifoldCFException
{
// Note: This method does two things: First, it ensures that the number of documents per job per bin does
// not exceed the calculated throttle number. Second, it keeps track of how many document queue items
// will be needed, so we can stop when we've got enough for the moment.
Logging.perf.debug("Checking if row should be included");
// This is the end that does the work.
// The row passed in has the following jobqueue columns: idField, jobIDField, docHashField, docIDField, and prioritySetField
Long jobIDValue = (Long)row.getValue(JobQueue.jobIDField);
// Get the connection name for this row
String connectionName = (String)jobConnection.get(jobIDValue);
if (connectionName == null)
{
Logging.perf.debug(" Row does not have an eligible job - excluding");
return false;
}
IRepositoryConnector connectorInstance = (IRepositoryConnector)activeConnections.get(connectionName);
if (connectorInstance == null)
{
Logging.perf.debug(" Row does not have an eligible connector instance - excluding");
return false;
}
// Find the connection limit for this document
MutableInteger connectionLimit = (MutableInteger)maxConnectionCounts.get(connectionName);
if (connectionLimit != null)
{
if (connectionLimit.intValue() == 0)
{
Logging.perf.debug(" Row exceeds its connection limit - excluding");
return false;
}
connectionLimit.decrement();
}
// Tally this item in the job queue hash, so we can detect when to stop
QueueHashItem queueItem = (QueueHashItem)jobQueueHash.get(jobIDValue);
if (queueItem == null)
{
// Need to talk to the connector to get a max number of docs per chunk
int maxCount = ((Integer)setSizes.get(connectionName)).intValue();
queueItem = new QueueHashItem(maxCount);
jobQueueHash.put(jobIDValue,queueItem);
}
String docIDHash = (String)row.getValue(JobQueue.docHashField);
String docID = (String)row.getValue(JobQueue.docIDField);
// Figure out what the right bins are, given the data we have.
// This will involve a call to the connector.
String[] binNames = ManifoldCF.calculateBins(connectorInstance,docID);
// Keep the running count, so we can abort without going through the whole set.
documentsProcessed++;
//scanRecord.addBins(binNames);
ThrottleJobItem item = (ThrottleJobItem)connectionMap.get(connectionName);
// If there is no schedule-based throttling on this connection, we're done.
if (item == null)
{
queueItem.addDocument();
Logging.perf.debug(" Row has no throttling - including");
return true;
}
int j = 0;
while (j < binNames.length)
{
if (item.isEmpty(binNames[j]))
{
if (Logging.perf.isDebugEnabled())
Logging.perf.debug(" Bin "+binNames[j]+" has no more available fetches - excluding");
Object o = row.getValue(JobQueue.prioritySetField);
if (o == null || ((Long)o).longValue() <= prioritizationTime)
{
// Need to add a document descriptor based on this row to the blockingDocuments object!
// This will cause it to be reprioritized preferentially, getting it out of the way if it shouldn't
// be there.
Long id = (Long)row.getValue(JobQueue.idField);
Long jobID = (Long)row.getValue(JobQueue.jobIDField);
DocumentDescription dd = new DocumentDescription(id,jobID,docIDHash,docID);
blockingDocumentArray.add(dd);
}
return false;
}
j++;
}
j = 0;
while (j < binNames.length)
{
item.decrement(binNames[j++]);
}
queueItem.addDocument();
Logging.perf.debug(" Including!");
return true;
}
/** See if we should examine another row.
*@return true if we need to keep going, or false if we are done.
*/
public boolean checkContinue()
throws ManifoldCFException
{
if (documentsProcessed >= EXTRA_FACTOR * n * maxSetSize)
return false;
// If the number of chunks exceeds n, we are done
Iterator iter = jobQueueHash.keySet().iterator();
int count = 0;
while (iter.hasNext())
{
Long jobID = (Long)iter.next();
QueueHashItem item = (QueueHashItem)jobQueueHash.get(jobID);
count += item.getChunkCount();
if (count > n)
return false;
}
return true;
}
}
/** This class contains information per job on how many queue items have so far been accumulated.
*/
protected static class QueueHashItem
{
// The number of items per chunk for this job
int itemsPerChunk;
// The number of chunks so far, INCLUDING incomplete chunks
int chunkCount = 0;
// The number of documents in the current incomplete chunk
int currentDocumentCount = 0;
/** Construct.
*@param itemsPerChunk is the number of items per chunk for this job.
*/
public QueueHashItem(int itemsPerChunk)
{
this.itemsPerChunk = itemsPerChunk;
}
/** Duplicate. */
public QueueHashItem duplicate()
{
QueueHashItem rval = new QueueHashItem(itemsPerChunk);
rval.chunkCount = chunkCount;
rval.currentDocumentCount = currentDocumentCount;
return rval;
}
/** Add a document to this job.
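* For example, with itemsPerChunk = 3, successive calls leave chunkCount at 1,1,1,2,2,2,3,...;
* the count thus includes the current, possibly incomplete, chunk.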
*/
public void addDocument()
{
currentDocumentCount++;
if (currentDocumentCount == 1)
chunkCount++;
if (currentDocumentCount == itemsPerChunk)
currentDocumentCount = 0;
}
/** Get the number of chunks.
*@return the number of chunks.
*/
public int getChunkCount()
{
return chunkCount;
}
}
/** This class represents the information stored PER JOB in the throttling structure.
* In this structure, "remaining" counts are kept for each bin. When the bin becomes empty,
* then no more documents that would map to that bin will be returned, for this query.
*
* The way in which the maximum count per bin is determined is not part of this class.
*/
protected static class ThrottleJobItem
{
/** These are the bin limits. This is an array of ThrottleLimitSpec objects. */
protected ArrayList throttleLimits = new ArrayList();
/** This is a map of the bins and their current counts. If an entry doesn't exist, the count is considered
* to be the bin's maximum (see findMaxCount()). */
protected HashMap binCounts = new HashMap();
/** Constructor. */
public ThrottleJobItem()
{
}
/** Add a bin limit.
*@param regexp is the regular expression describing the bins to which the limit applies.
*@param maxCount is the maximum number of fetches allowed for that bin.
*/
public void addLimit(String regexp, int maxCount)
{
try
{
throttleLimits.add(new ThrottleLimitSpec(regexp,maxCount));
}
catch (PatternSyntaxException e)
{
// Ignore the bad entry; it just won't contribute any throttling.
}
}
/** Create a duplicate of this item.
*@return the duplicate.
*/
public ThrottleJobItem duplicate()
{
ThrottleJobItem rval = new ThrottleJobItem();
rval.throttleLimits = throttleLimits;
Iterator iter = binCounts.keySet().iterator();
while (iter.hasNext())
{
String key = (String)iter.next();
rval.binCounts.put(key,((MutableInteger)binCounts.get(key)).duplicate());  // copy into the duplicate, not into this instance
}
return rval;
}
/** Check if the specified bin is empty.
*@param binName is the bin name.
*@return true if empty.
*/
public boolean isEmpty(String binName)
{
MutableInteger value = (MutableInteger)binCounts.get(binName);
int remaining;
if (value == null)
{
int x = findMaxCount(binName);
if (x == -1)
return false;
remaining = x;
}
else
remaining = value.intValue();
return (remaining == 0);
}
/** Decrement specified bin.
*@param binName is the bin name.
*/
public void decrement(String binName)
{
MutableInteger value = (MutableInteger)binCounts.get(binName);
if (value == null)
{
int x = findMaxCount(binName);
if (x == -1)
return;
value = new MutableInteger(x);
binCounts.put(binName,value);
}
value.decrement();
}
/** Given a bin name, find the max value for it using the regexps that are in place.
*@param binName is the bin name.
*@return the max count for that bin, or -1 if infinite.
*/
protected int findMaxCount(String binName)
{
// Each connector generates a set of bins per descriptor, e.g. "", ".com", ".metacarta.com", "foo.metacarta.com"
//
// We want to be able to do a couple of different kinds of things easily. For example, we want to:
// - be able to "turn off" or restrict fetching for a given domain, to a lower value than for other domains
// - be able to control fetch rates of .com, .metacarta.com, and foo.metacarta.com such that we
// can establish a faster rate for .com than for foo.metacarta.com
//
// The standard case is to limit fetch rate for all terminal domains (e.g. foo.metacarta.com) to some number:
// ^[^\.] = 8
//
// To apply an additional limit restriction on a specific domain easily requires that the MINIMUM rate
// value be chosen when more than one regexp match is found:
// ^[^\.] = 8
// ^foo\.metacarta\.com = 4
//
// To apply different rates for different levels:
// ^[^\.] = 8
// ^\.[^\.]*\.[^\.]*$ = 20
// ^\.[^\.]*$ = 40
//
// If the same bin is matched by more than one regexp, I now take the MINIMUM value, since this seems to be
// more what the world wants to do (restrict, rather than increase, fetch rates).
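// Worked example: given the limits above (^[^\.] = 8 and ^foo\.metacarta\.com = 4), the bin
// "foo.metacarta.com" matches both expressions, so the loop below yields min(8,4) = 4. A bin
// that matches no expression yields -1, meaning no throttling applies.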
int maxCount = -1;
int i = 0;
while (i < throttleLimits.size())
{
ThrottleLimitSpec spec = (ThrottleLimitSpec)throttleLimits.get(i++);
Pattern p = spec.getRegexp();
Matcher m = p.matcher(binName);
if (m.find())
{
int limit = spec.getMaxCount();
if (maxCount == -1 || limit < maxCount)
maxCount = limit;
}
}
return maxCount;
}
}
/** This is a class which describes an individual throttle limit, in fetches. */
protected static class ThrottleLimitSpec
{
/** Regexp */
protected Pattern regexp;
/** The fetch limit for all bins matching that regexp */
protected int maxCount;
/** Constructor */
public ThrottleLimitSpec(String regexp, int maxCount)
throws PatternSyntaxException
{
this.regexp = Pattern.compile(regexp);
this.maxCount = maxCount;
}
/** Get the regexp. */
public Pattern getRegexp()
{
return regexp;
}
/** Get the max count */
public int getMaxCount()
{
return maxCount;
}
}
/** Mutable integer class.
*/
protected static class MutableInteger
{
int value;
/** Construct.
*/
public MutableInteger(int value)
{
this.value = value;
}
/** Duplicate */
public MutableInteger duplicate()
{
return new MutableInteger(value);
}
/** Decrement.
*/
public void decrement()
{
value--;
}
/** Increment.
*/
public void increment()
{
value++;
}
/** Get value.
*/
public int intValue()
{
return value;
}
}
}
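/** Get a list of cleanable document descriptions. This list will take into account
* multiple jobs that may own the same document. All documents for which a description
* is returned will be transitioned to the "being cleaned" state.
*@param maxCount is the maximum number of documents to return.
*@param currentTime is the current time; some fetches do not occur until a specific time.
*@return the document descriptions, plus flags indicating which documents may also be
* removed from the index (i.e. are not shared with any other job).
*/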
public DocumentSetAndFlags getNextCleanableDocuments(int maxCount, long currentTime)
throws ManifoldCFException
{
// The query will be built here, because it joins the jobs table against the jobqueue
// table.
//
// This query must only pick up documents that are not active in any job and
// which belong to a job that's in a "shutting down" state and are in
// a "purgatory" state.
//
// We are in fact more conservative in this query than we need to be; the documents
// excluded will include some that merely match our criteria, which are designed to
// be fast rather than perfect. The match we make is: hashvalue against hashvalue, and
// different job id's.
//
// SELECT id,jobid,docid FROM jobqueue t0 WHERE t0.status='P' AND EXISTS(SELECT 'x' FROM
// jobs t1 WHERE t0.jobid=t1.id AND t1.status='X')
// AND NOT EXISTS(SELECT 'x' FROM jobqueue t2 WHERE t0.hashval=t2.hashval AND t0.jobid!=t2.jobid
// AND t2.status IN ('A','F','B'))
//
// Do a simple preliminary query, since the big query is currently slow, so that we don't waste time during stasis or
// ingestion.
// Moved outside of transaction, so we have no chance of locking up job status cache key for an extended period of time.
if (!jobs.cleaningJobsPresent())
return new DocumentSetAndFlags(new DocumentDescription[0],new boolean[0]);
long startTime = 0L;
if (Logging.perf.isDebugEnabled())
{
startTime = System.currentTimeMillis();
Logging.perf.debug("Waiting to find documents to put on the cleaning queue");
}
while (true)
{
long sleepAmt = 0L;
// Enter a write lock. This means we don't need a FOR UPDATE on the query.
lockManager.enterWriteLock(cleanStufferLock);
try
{
database.beginTransaction();
try
{
if (Logging.perf.isDebugEnabled())
Logging.perf.debug("After "+new Long(System.currentTimeMillis()-startTime).toString()+" ms, beginning query to look for documents to put on cleaning queue");
// Note: This query does not do "FOR UPDATE", because it is running under the only thread that can possibly change the document's state to "being cleaned".
ArrayList list = new ArrayList();
StringBuilder sb = new StringBuilder("SELECT ");
sb.append(jobQueue.idField).append(",")
.append(jobQueue.jobIDField).append(",")
.append(jobQueue.docHashField).append(",")
.append(jobQueue.docIDField).append(",")
.append(jobQueue.failTimeField).append(",")
.append(jobQueue.failCountField)
.append(" FROM ").append(jobQueue.getTableName()).append(" t0 WHERE ")
.append(database.buildConjunctionClause(list,new ClauseDescription[]{
new UnitaryClause("t0."+jobQueue.statusField,jobQueue.statusToString(jobQueue.STATUS_PURGATORY))})).append(" AND ")
.append("(t0.").append(jobQueue.checkTimeField).append(" IS NULL OR t0.").append(jobQueue.checkTimeField).append("<=?) AND ");
list.add(new Long(currentTime));
sb.append("EXISTS(SELECT 'x' FROM ").append(jobs.getTableName()).append(" t1 WHERE ")
.append(database.buildConjunctionClause(list,new ClauseDescription[]{
new UnitaryClause("t1."+jobs.statusField,jobs.statusToString(jobs.STATUS_SHUTTINGDOWN)),
new JoinClause("t1."+jobs.idField,"t0."+jobQueue.jobIDField)}))
.append(") AND ");
sb.append("NOT EXISTS(SELECT 'x' FROM ").append(jobQueue.getTableName()).append(" t2 WHERE ")
.append(database.buildConjunctionClause(list,new ClauseDescription[]{
new JoinClause("t2."+jobQueue.docHashField,"t0."+jobQueue.docHashField)})).append(" AND ")
.append("t2.").append(jobQueue.statusField).append(" IN (?,?,?,?,?,?) AND ")
.append("t2.").append(jobQueue.jobIDField).append("!=t0.").append(jobQueue.jobIDField)
.append(") ");
list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVE));
list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVEPURGATORY));
list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVENEEDRESCAN));
list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVENEEDRESCANPURGATORY));
list.add(jobQueue.statusToString(jobQueue.STATUS_BEINGDELETED));
list.add(jobQueue.statusToString(jobQueue.STATUS_BEINGCLEANED));
sb.append(database.constructOffsetLimitClause(0,maxCount));
// The "checktime is null" check is for backwards compatibility
IResultSet set = database.performQuery(sb.toString(),list,null,null,maxCount,null);
if (Logging.perf.isDebugEnabled())
Logging.perf.debug("Done getting docs to cleaning queue after "+new Long(System.currentTimeMillis()-startTime).toString()+" ms.");
// We need to organize the returned set by connection name and output connection name, so that we can efficiently
// use getUnindexableDocumentIdentifiers.
// This is a table keyed by connection name and containing an ArrayList, which in turn contains DocumentDescription
// objects.
HashMap connectionNameMap = new HashMap();
HashMap documentIDMap = new HashMap();
int i = 0;
while (i < set.getRowCount())
{
IResultRow row = set.getRow(i);
Long jobID = (Long)row.getValue(jobQueue.jobIDField);
String documentIDHash = (String)row.getValue(jobQueue.docHashField);
String documentID = (String)row.getValue(jobQueue.docIDField);
Long failTimeValue = (Long)row.getValue(jobQueue.failTimeField);
Long failCountValue = (Long)row.getValue(jobQueue.failCountField);
// Failtime is probably not useful in this context, but we'll bring it along for completeness
long failTime;
if (failTimeValue == null)
failTime = -1L;
else
failTime = failTimeValue.longValue();
int failCount;
if (failCountValue == null)
failCount = 0;
else
failCount = (int)failCountValue.longValue();
IJobDescription jobDesc = load(jobID);
String connectionName = jobDesc.getConnectionName();
String outputConnectionName = jobDesc.getOutputConnectionName();
DocumentDescription dd = new DocumentDescription((Long)row.getValue(jobQueue.idField),
jobID,documentIDHash,documentID,failTime,failCount);
String compositeDocumentID = makeCompositeID(documentIDHash,connectionName);
documentIDMap.put(compositeDocumentID,dd);
Map y = (Map)connectionNameMap.get(connectionName);
if (y == null)
{
y = new HashMap();
connectionNameMap.put(connectionName,y);
}
ArrayList x = (ArrayList)y.get(outputConnectionName);
if (x == null)
{
// New entry needed
x = new ArrayList();
y.put(outputConnectionName,x);
}
x.add(dd);
i++;
}
// For each connection/output pair, obtain a filtered answer, and enter all answers into a hash table.
// We'll then scan the result again to look up the right descriptions for return,
// and to flag which documents can also be removed from the index (those not shared with another job).
HashMap allowedDocIds = new HashMap();
Iterator iter = connectionNameMap.keySet().iterator();
while (iter.hasNext())
{
String connectionName = (String)iter.next();
Map y = (Map)connectionNameMap.get(connectionName);
Iterator outputIter = y.keySet().iterator();
while (outputIter.hasNext())
{
String outputConnectionName = (String)outputIter.next();
ArrayList x = (ArrayList)y.get(outputConnectionName);
// Do the filter query
DocumentDescription[] descriptions = new DocumentDescription[x.size()];
int j = 0;
while (j < descriptions.length)
{
descriptions[j] = (DocumentDescription)x.get(j);
j++;
}
String[] docIDHashes = getUnindexableDocumentIdentifiers(descriptions,connectionName,outputConnectionName);
j = 0;
while (j < docIDHashes.length)
{
String docIDHash = docIDHashes[j++];
String key = makeCompositeID(docIDHash,connectionName);
allowedDocIds.put(key,docIDHash);
}
}
}
// Now, assemble a result, and change the state of the records accordingly
// First thing to do is order by document hash, so we reduce the risk of deadlock.
String[] compositeIDArray = new String[documentIDMap.size()];
i = 0;
iter = documentIDMap.keySet().iterator();
while (iter.hasNext())
{
compositeIDArray[i++] = (String)iter.next();
}
java.util.Arrays.sort(compositeIDArray);
DocumentDescription[] rval = new DocumentDescription[documentIDMap.size()];
boolean[] rvalBoolean = new boolean[documentIDMap.size()];
i = 0;
while (i < compositeIDArray.length)
{
String compositeDocID = compositeIDArray[i];
DocumentDescription dd = (DocumentDescription)documentIDMap.get(compositeDocID);
// Determine whether we can delete it from the index or not
rvalBoolean[i] = (allowedDocIds.get(compositeDocID) != null);
// Set the record status to "being cleaned" and return it
rval[i++] = dd;
jobQueue.setCleaningStatus(dd.getID());
}
TrackerClass.notePrecommit();
database.performCommit();
TrackerClass.noteCommit();
if (Logging.perf.isDebugEnabled())
Logging.perf.debug("Done pruning unindexable docs after "+new Long(System.currentTimeMillis()-startTime).toString()+" ms.");
return new DocumentSetAndFlags(rval,rvalBoolean);
}
catch (Error e)
{
database.signalRollback();
TrackerClass.noteRollback();
throw e;
}
catch (ManifoldCFException e)
{
database.signalRollback();
TrackerClass.noteRollback();
if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT)
{
if (Logging.perf.isDebugEnabled())
Logging.perf.debug("Aborted transaction finding deleteable docs: "+e.getMessage());
sleepAmt = getRandomAmount();
continue;
}
throw e;
}
finally
{
database.endTransaction();
}
}
finally
{
lockManager.leaveWriteLock(cleanStufferLock);
sleepFor(sleepAmt);
}
}
}
/** Create a composite document hash key. This consists of the document id hash plus the
* connection name.
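* For example (hypothetical values), a document hash of "7F3A" under connection "myconn"
* yields the key "7F3A:myconn".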
*/
protected static String makeCompositeID(String docIDHash, String connectionName)
{
return docIDHash + ":" + connectionName;
}
/** Get list of deletable document descriptions. This list will take into account
* multiple jobs that may own the same document. All documents for which a description
* is returned will be transitioned to the "beingdeleted" state. Documents which are
* not in transition and are eligible, but are owned by other jobs, will have their
* jobqueue entries deleted by this method.
*@param maxCount is the maximum number of documents to return.
*@param currentTime is the current time; some fetches do not occur until a specific time.
*@return the document descriptions for these documents.
*/
public DocumentDescription[] getNextDeletableDocuments(int maxCount, long currentTime)
throws ManifoldCFException
{
// The query will be built here, because it joins the jobs table against the jobqueue
// table.
//
// This query must only pick up documents that are not active in any job and
// which either belong to a job that's in a "delete pending" state and are in
// a "complete", "purgatory", or "pendingpurgatory" state, OR belong to a job
// that's in a "shutting down" state and are in the "purgatory" state.
//
// We are in fact more conservative in this query than we need to be; the documents
// excluded will include some that merely match our criteria, which are designed to
// be fast rather than perfect. The match we make is: hashvalue against hashvalue, and
// different job id's.
//
// SELECT id,jobid,docid FROM jobqueue t0 WHERE (t0.status IN ('C','P','G') AND EXISTS(SELECT 'x' FROM
// jobs t1 WHERE t0.jobid=t1.id AND t1.status='D')
// AND NOT EXISTS(SELECT 'x' FROM jobqueue t2 WHERE t0.hashval=t2.hashval AND t0.jobid!=t2.jobid
// AND t2.status IN ('A','F','B'))
//
// Do a simple preliminary query, since the big query is currently slow, so that we don't waste time during stasis or
// ingestion.
// Moved outside of transaction, so we have no chance of locking up job status cache key for an extended period of time.
if (!jobs.deletingJobsPresent())
return new DocumentDescription[0];
long startTime = 0L;
if (Logging.perf.isDebugEnabled())
{
startTime = System.currentTimeMillis();
Logging.perf.debug("Waiting to find documents to put on the delete queue");
}
while (true)
{
long sleepAmt = 0L;
// Enter a write lock so that multiple threads can't be in here at the same time
lockManager.enterWriteLock(deleteStufferLock);
try
{
database.beginTransaction();
try
{
if (Logging.perf.isDebugEnabled())
Logging.perf.debug("After "+new Long(System.currentTimeMillis()-startTime).toString()+" ms, beginning query to look for documents to put on delete queue");
// Note: This query does not do "FOR UPDATE", because it is running under the only thread that can possibly change the document's state to "being deleted".
// If FOR UPDATE was included, deadlock happened a lot.
ArrayList list = new ArrayList();
StringBuilder sb = new StringBuilder("SELECT ");
sb.append(jobQueue.idField).append(",")
.append(jobQueue.jobIDField).append(",")
.append(jobQueue.docHashField).append(",")
.append(jobQueue.docIDField).append(",")
.append(jobQueue.failTimeField).append(",")
.append(jobQueue.failCountField).append(" FROM ").append(jobQueue.getTableName()).append(" t0 WHERE ")
.append(database.buildConjunctionClause(list,new ClauseDescription[]{
new UnitaryClause("t0."+jobQueue.statusField,jobQueue.statusToString(jobQueue.STATUS_ELIGIBLEFORDELETE))})).append(" AND ")
.append("t0.").append(jobQueue.checkTimeField).append("<=? AND ");
list.add(new Long(currentTime));
sb.append("EXISTS(SELECT 'x' FROM ").append(jobs.getTableName()).append(" t1 WHERE ")
.append(database.buildConjunctionClause(list,new ClauseDescription[]{
new UnitaryClause("t1."+jobs.statusField,jobs.statusToString(jobs.STATUS_DELETING)),
new JoinClause("t1."+jobs.idField,"t0."+jobQueue.jobIDField)})).append(") AND ");
sb.append("NOT EXISTS(SELECT 'x' FROM ").append(jobQueue.getTableName()).append(" t2 WHERE ")
.append(database.buildConjunctionClause(list,new ClauseDescription[]{
new JoinClause("t2."+jobQueue.docHashField,"t0."+jobQueue.docHashField)})).append(" AND ")
.append("t2.").append(jobQueue.statusField).append(" IN (?,?,?,?,?,?) AND ")
.append("t2.").append(jobQueue.jobIDField).append("!=t0.").append(jobQueue.jobIDField)
.append(") ");
list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVE));
list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVEPURGATORY));
list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVENEEDRESCAN));
list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVENEEDRESCANPURGATORY));
list.add(jobQueue.statusToString(jobQueue.STATUS_BEINGDELETED));
list.add(jobQueue.statusToString(jobQueue.STATUS_BEINGCLEANED));
sb.append(database.constructOffsetLimitClause(0,maxCount));
// (Unlike the cleaning query, there is no "checktime is null" backwards-compatibility check here.)
IResultSet set = database.performQuery(sb.toString(),list,null,null,maxCount,null);
if (Logging.perf.isDebugEnabled())
Logging.perf.debug("Done getting docs to delete queue after "+new Long(System.currentTimeMillis()-startTime).toString()+" ms.");
// We need to organize the returned set by connection name, so that we can efficiently
// use getUnindexableDocumentIdentifiers.
// This is a table keyed by connection name and containing an ArrayList, which in turn contains DocumentDescription
// objects.
HashMap connectionNameMap = new HashMap();
HashMap documentIDMap = new HashMap();
int i = 0;
while (i < set.getRowCount())
{
IResultRow row = set.getRow(i);
Long jobID = (Long)row.getValue(jobQueue.jobIDField);
String documentIDHash = (String)row.getValue(jobQueue.docHashField);
String documentID = (String)row.getValue(jobQueue.docIDField);
Long failTimeValue = (Long)row.getValue(jobQueue.failTimeField);
Long failCountValue = (Long)row.getValue(jobQueue.failCountField);
// Failtime is probably not useful in this context, but we'll bring it along for completeness
long failTime;
if (failTimeValue == null)
failTime = -1L;
else
failTime = failTimeValue.longValue();
int failCount;
if (failCountValue == null)
failCount = 0;
else
failCount = (int)failCountValue.longValue();
IJobDescription jobDesc = load(jobID);
String connectionName = jobDesc.getConnectionName();
String outputConnectionName = jobDesc.getOutputConnectionName();
DocumentDescription dd = new DocumentDescription((Long)row.getValue(jobQueue.idField),
jobID,documentIDHash,documentID,failTime,failCount);
String compositeDocumentID = makeCompositeID(documentIDHash,connectionName);
documentIDMap.put(compositeDocumentID,dd);
Map y = (Map)connectionNameMap.get(connectionName);
if (y == null)
{
y = new HashMap();
connectionNameMap.put(connectionName,y);
}
ArrayList x = (ArrayList)y.get(outputConnectionName);
if (x == null)
{
// New entry needed
x = new ArrayList();
y.put(outputConnectionName,x);
}
x.add(dd);
i++;
}
// For each connection/output pair, obtain a filtered answer, and enter all answers into a hash table.
// We'll then scan the result again to look up the right descriptions for return,
// and delete the ones that are owned multiply.
HashMap allowedDocIds = new HashMap();
Iterator iter = connectionNameMap.keySet().iterator();
while (iter.hasNext())
{
String connectionName = (String)iter.next();
Map y = (Map)connectionNameMap.get(connectionName);
Iterator outputIter = y.keySet().iterator();
while (outputIter.hasNext())
{
String outputConnectionName = (String)outputIter.next();
ArrayList x = (ArrayList)y.get(outputConnectionName);
// Do the filter query
DocumentDescription[] descriptions = new DocumentDescription[x.size()];
int j = 0;
while (j < descriptions.length)
{
descriptions[j] = (DocumentDescription)x.get(j);
j++;
}
String[] docIDHashes = getUnindexableDocumentIdentifiers(descriptions,connectionName,outputConnectionName);
j = 0;
while (j < docIDHashes.length)
{
String docIDHash = docIDHashes[j++];
String key = makeCompositeID(docIDHash,connectionName);
allowedDocIds.put(key,docIDHash);
}
}
}
// Now, assemble a result, and change the state of the records accordingly
// First thing to do is order by document hash to reduce chances of deadlock.
String[] compositeIDArray = new String[documentIDMap.size()];
i = 0;
iter = documentIDMap.keySet().iterator();
while (iter.hasNext())
{
compositeIDArray[i++] = (String)iter.next();
}
java.util.Arrays.sort(compositeIDArray);
DocumentDescription[] rval = new DocumentDescription[allowedDocIds.size()];
int j = 0;
i = 0;
while (i < compositeIDArray.length)
{
String compositeDocumentID = compositeIDArray[i];
DocumentDescription dd = (DocumentDescription)documentIDMap.get(compositeDocumentID);
if (allowedDocIds.get(compositeDocumentID) == null)
{
// Delete this record and do NOT return it.
jobQueue.deleteRecord(dd.getID());
// What should we do about hopcount here?
// We are deleting a record which belongs to a job that is being
// cleaned up. The job itself will go away when this is done,
// and so will all the hopcount stuff pertaining to it. So, the
// treatment I've chosen here is to leave the hopcount alone and
// let the job cleanup get rid of it at the right time.
// Note: carrydown records handled in the same manner...
//carryDown.deleteRecords(dd.getJobID(),new String[]{dd.getDocumentIdentifier()});
}
else
{
// Set the record status to "being deleted" and return it
rval[j++] = dd;
jobQueue.setDeletingStatus(dd.getID());
}
i++;
}
TrackerClass.notePrecommit();
database.performCommit();
TrackerClass.noteCommit();
if (Logging.perf.isDebugEnabled())
Logging.perf.debug("Done pruning unindexable docs after "+new Long(System.currentTimeMillis()-startTime).toString()+" ms.");
return rval;
}
catch (Error e)
{
database.signalRollback();
TrackerClass.noteRollback();
throw e;
}
catch (ManifoldCFException e)
{
database.signalRollback();
TrackerClass.noteRollback();
if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT)
{
if (Logging.perf.isDebugEnabled())
Logging.perf.debug("Aborted transaction finding deleteable docs: "+e.getMessage());
sleepAmt = getRandomAmount();
continue;
}
throw e;
}
finally
{
database.endTransaction();
}
}
finally
{
lockManager.leaveWriteLock(deleteStufferLock);
sleepFor(sleepAmt);
}
}
}
/** Get a list of document identifiers that should actually be deleted from the index, from a list that
* might contain identifiers that are shared with other jobs, which are targeted to the same output connection.
* The input list is guaranteed to be smaller in size than maxInClauseCount for the database.
*@param documentIdentifiers is the set of document identifiers to consider.
*@param connectionName is the connection name for ALL the document identifiers.
*@param outputConnectionName is the output connection name for ALL the document identifiers.
*@return the set of documents which should be removed from the index.
*/
protected String[] getUnindexableDocumentIdentifiers(DocumentDescription[] documentIdentifiers, String connectionName, String outputConnectionName)
throws ManifoldCFException
{
// This is where we will count the individual document id's
HashMap countMap = new HashMap();
// First thing: Compute the set of document identifier hash values to query against
HashMap map = new HashMap();
int i = 0;
while (i < documentIdentifiers.length)
{
String hash = documentIdentifiers[i++].getDocumentIdentifierHash();
map.put(hash,hash);
countMap.put(hash,new MutableInteger(0));
}
if (map.size() == 0)
return new String[0];
// Build a query
StringBuilder sb = new StringBuilder();
ArrayList list = new ArrayList();
ArrayList docList = new ArrayList();
Iterator iter = map.keySet().iterator();
while (iter.hasNext())
{
docList.add(iter.next());
}
// Note: There is a potential race condition here. One job may be running while another is in process of
// being deleted. If they share a document, then the delete task could decide to delete the document and do so right
// after the ingestion takes place in the running job, but right before the document's status is updated
// in the job queue [which would have prevented the deletion].
// Unless a transaction is thrown around the time ingestion is taking place (which is a very bad idea)
// we are stuck with the possibility of this condition, which will essentially lead to a document being
// missing from the index.
// One way of dealing with this is to treat "active" documents as already ingested, for the purpose of
// reference counting. Then these documents will not be deleted. The risk then becomes that the "active"
// document entry will not be completed (say, because of a restart), and thus the corresponding document
// will never be removed from the index.
//
// Instead, the only solution is to not queue a document for any activity that is inconsistent with activities
// that may already be ongoing for that document. For this reason, I have introduced a "BEING_DELETED"
// and "BEING_CLEANED" state
// for a document. These states will allow the various queries that queue up activities to avoid documents that
// are currently being processed elsewhere.
sb.append("SELECT t0.").append(jobQueue.docHashField).append(" FROM ").append(jobQueue.getTableName()).append(" t0 WHERE ")
.append(database.buildConjunctionClause(list,new ClauseDescription[]{
new MultiClause("t0."+jobQueue.docHashField,docList)})).append(" AND ")
.append("t0.").append(jobQueue.statusField).append(" IN (?,?,?,?,?) AND ");
list.add(jobQueue.statusToString(jobQueue.STATUS_PURGATORY));
list.add(jobQueue.statusToString(jobQueue.STATUS_PENDINGPURGATORY));
list.add(jobQueue.statusToString(jobQueue.STATUS_COMPLETE));
list.add(jobQueue.statusToString(jobQueue.STATUS_UNCHANGED));
list.add(jobQueue.statusToString(jobQueue.STATUS_ELIGIBLEFORDELETE));
sb.append("EXISTS(SELECT 'x' FROM ").append(jobs.getTableName()).append(" t1 WHERE ")
.append(database.buildConjunctionClause(list,new ClauseDescription[]{
new JoinClause("t1."+jobs.idField,"t0."+jobQueue.jobIDField)})).append(" AND ")
.append("t1.").append(jobs.connectionNameField).append("=? AND ")
.append("t1.").append(jobs.outputNameField).append("=?)");
list.add(connectionName);
list.add(outputConnectionName);
// Do the query, and then count the number of times each document identifier occurs.
IResultSet results = database.performQuery(sb.toString(),list,null,null);
i = 0;
while (i < results.getRowCount())
{
IResultRow row = results.getRow(i++);
String docIDHash = (String)row.getValue(jobQueue.docHashField);
MutableInteger mi = (MutableInteger)countMap.get(docIDHash);
if (mi != null)
mi.increment();
}
// Go through and count only those that have a count of 1.
int count = 0;
iter = countMap.keySet().iterator();
while (iter.hasNext())
{
String docIDHash = (String)iter.next();
MutableInteger mi = (MutableInteger)countMap.get(docIDHash);
if (mi.intValue() == 1)
count++;
}
String[] rval = new String[count];
iter = countMap.keySet().iterator();
count = 0;
while (iter.hasNext())
{
String docIDHash = (String)iter.next();
MutableInteger mi = (MutableInteger)countMap.get(docIDHash);
if (mi.intValue() == 1)
rval[count++] = docIDHash;
}
return rval;
}
// These methods support the reprioritization thread.
/** Get a list of already-processed documents to reprioritize. Documents in all jobs will be
* returned by this method. Up to n document descriptions will be returned.
*@param currentTime is the current time stamp for this prioritization pass. Avoid
* picking up any documents that are labeled with this timestamp or after.
*@param n is the maximum number of document descriptions desired.
*@return the document descriptions.
*/
public DocumentDescription[] getNextAlreadyProcessedReprioritizationDocuments(long currentTime, int n)
throws ManifoldCFException
{
StringBuilder sb = new StringBuilder();
ArrayList list = new ArrayList();
// The desired query is:
// SELECT docid FROM jobqueue WHERE prioritysettime < (currentTime) LIMIT (n)
sb.append("SELECT ")
.append(jobQueue.idField).append(",")
.append(jobQueue.docHashField).append(",")
.append(jobQueue.docIDField).append(",")
.append(jobQueue.jobIDField)
.append(" FROM ").append(jobQueue.getTableName()).append(" WHERE ");
sb.append(database.buildConjunctionClause(list,new ClauseDescription[]{
new MultiClause(jobQueue.statusField,new Object[]{
jobQueue.statusToString(JobQueue.STATUS_COMPLETE),
jobQueue.statusToString(JobQueue.STATUS_UNCHANGED),
jobQueue.statusToString(JobQueue.STATUS_PURGATORY)}),
new UnitaryClause(jobQueue.prioritySetField,"<",new Long(currentTime))})).append(" ");
sb.append(database.constructOffsetLimitClause(0,n));
IResultSet set = database.performQuery(sb.toString(),list,null,null,n,null);
DocumentDescription[] rval = new DocumentDescription[set.getRowCount()];
int i = 0;
while (i < set.getRowCount())
{
IResultRow row = set.getRow(i);
rval[i] = new DocumentDescription((Long)row.getValue(jobQueue.idField),
(Long)row.getValue(jobQueue.jobIDField),
(String)row.getValue(jobQueue.docHashField),
(String)row.getValue(jobQueue.docIDField));
i++;
}
return rval;
}
/** Get a list of not-yet-processed documents to reprioritize. Documents in all jobs will be
* returned by this method. Up to n document descriptions will be returned.
*@param currentTime is the current time stamp for this prioritization pass. Avoid
* picking up any documents that are labeled with this timestamp or after.
*@param n is the maximum number of document descriptions desired.
*@return the document descriptions.
*/
public DocumentDescription[] getNextNotYetProcessedReprioritizationDocuments(long currentTime, int n)
throws ManifoldCFException
{
StringBuilder sb = new StringBuilder("SELECT ");
ArrayList list = new ArrayList();
// This query MUST return only documents that are in a pending state which belong to an active job!!!
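// Roughly (status codes symbolic, not the literal database values):
// SELECT id,dochash,docid,jobid FROM jobqueue t0
//   WHERE t0.status IN (<hopcountremoved>,<pending>,<pendingpurgatory>)
//   AND t0.prioritysettime<(currentTime) AND t0.checkaction=<rescan>
//   AND EXISTS(SELECT 'x' FROM jobs t1 WHERE t1.status IN (<active states>) AND t1.id=t0.jobid)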
sb.append(jobQueue.idField).append(",")
.append(jobQueue.docHashField).append(",")
.append(jobQueue.docIDField).append(",")
.append(jobQueue.jobIDField)
.append(" FROM ").append(jobQueue.getTableName()).append(" t0 WHERE ")
.append(database.buildConjunctionClause(list,new ClauseDescription[]{
new MultiClause(jobQueue.statusField,new Object[]{
JobQueue.statusToString(jobQueue.STATUS_HOPCOUNTREMOVED),
JobQueue.statusToString(jobQueue.STATUS_PENDING),
JobQueue.statusToString(jobQueue.STATUS_PENDINGPURGATORY)}),
new UnitaryClause(jobQueue.prioritySetField,"<",new Long(currentTime))})).append(" AND ")
.append(jobQueue.checkActionField).append("=?").append(" AND ");
list.add(jobQueue.actionToString(JobQueue.ACTION_RESCAN));
// Per CONNECTORS-290, we need to be leaving priorities blank for jobs that aren't using them,
// so this will be changed to not include jobs where the priorities have been bashed to null.
//
// I've included ALL states that might have non-null doc priorities. This includes states
// corresponding to uninstalled connectors, since there is no transition that cleans out the
// document priorities in these states. The time during which a connector is uninstalled is
// expected to be short, because typically this state is the result of an installation procedure
// rather than willful action on the part of a user.
sb.append("EXISTS(SELECT 'x' FROM ").append(jobs.getTableName()).append(" t1 WHERE ")
.append(database.buildConjunctionClause(list,new ClauseDescription[]{
new MultiClause("t1."+jobs.statusField,new Object[]{
Jobs.statusToString(Jobs.STATUS_STARTINGUP),
Jobs.statusToString(Jobs.STATUS_STARTINGUPMINIMAL),
Jobs.statusToString(Jobs.STATUS_ACTIVE),
Jobs.statusToString(Jobs.STATUS_ACTIVESEEDING),
Jobs.statusToString(Jobs.STATUS_ACTIVE_UNINSTALLED),
Jobs.statusToString(Jobs.STATUS_ACTIVESEEDING_UNINSTALLED),
Jobs.statusToString(Jobs.STATUS_ACTIVE_NOOUTPUT),
Jobs.statusToString(Jobs.STATUS_ACTIVESEEDING_NOOUTPUT),
Jobs.statusToString(Jobs.STATUS_ACTIVE_NEITHER),
Jobs.statusToString(Jobs.STATUS_ACTIVESEEDING_NEITHER)
}),
new JoinClause("t1."+jobs.idField,"t0."+jobQueue.jobIDField)}))
.append(") ");
sb.append(database.constructOffsetLimitClause(0,n));
// Analyzing the jobqueue tables unconditionally (the planner became much more sensitive in 8.3
// than it used to be) was tried here, but is currently disabled:
//jobQueue.unconditionallyAnalyzeTables();
IResultSet set = database.performQuery(sb.toString(),list,null,null,n,null);
DocumentDescription[] rval = new DocumentDescription[set.getRowCount()];
int i = 0;
while (i < set.getRowCount())
{
IResultRow row = set.getRow(i);
rval[i] = new DocumentDescription((Long)row.getValue(jobQueue.idField),
(Long)row.getValue(jobQueue.jobIDField),
(String)row.getValue(jobQueue.docHashField),
(String)row.getValue(jobQueue.docIDField));
i++;
}
return rval;
}
/** Save a set of document priorities. If a document was eligible to have its
* priority set but is no longer eligible, the provided priority will not be written.
*@param currentTime is the time in milliseconds since epoch.
*@param documentDescriptions are the document descriptions.
*@param priorities are the desired priorities.
*/
public void writeDocumentPriorities(long currentTime, DocumentDescription[] documentDescriptions, double[] priorities)
throws ManifoldCFException
{
// Retry loop - in case we get a deadlock despite our best efforts
while (true)
{
// This should be ordered by document identifier hash in order to prevent potential deadlock conditions
HashMap indexMap = new HashMap();
String[] docIDHashes = new String[documentDescriptions.length];
int i = 0;
while (i < documentDescriptions.length)
{
String documentIDHash = documentDescriptions[i].getDocumentIdentifierHash() + ":"+documentDescriptions[i].getJobID();
docIDHashes[i] = documentIDHash;
indexMap.put(documentIDHash,new Integer(i));
i++;
}
java.util.Arrays.sort(docIDHashes);
long sleepAmt = 0L;
// Start the transaction now
database.beginTransaction();
try
{
// Need to order the writes by doc id.
i = 0;
while (i < docIDHashes.length)
{
String docIDHash = docIDHashes[i];
Integer x = (Integer)indexMap.remove(docIDHash);
if (x == null)
throw new ManifoldCFException("Assertion failure: duplicate document identifier jobid/hash detected!");
int index = x.intValue();
DocumentDescription dd = documentDescriptions[index];
double priority = priorities[index];
jobQueue.writeDocPriority(currentTime,dd.getID(),priorities[index]);
if (Logging.perf.isDebugEnabled())
Logging.perf.debug("Setting document priority for '"+dd.getDocumentIdentifier()+"' to "+new Double(priority).toString()+", set time "+new Long(currentTime).toString());
i++;
}
database.performCommit();
break;
}
catch (ManifoldCFException e)
{
database.signalRollback();
if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT)
{
if (Logging.perf.isDebugEnabled())
Logging.perf.debug("Aborted transaction writing doc priorities: "+e.getMessage());
sleepAmt = getRandomAmount();
continue;
}
throw e;
}
catch (Error e)
{
database.signalRollback();
throw e;
}
finally
{
database.endTransaction();
sleepFor(sleepAmt);
}
}
}
/** Get up to the next n documents to be expired.
* This method marks the documents whose descriptions have been returned as "being processed", or active.
* The same marking is used as is used for documents that have been queued for worker threads. The model
* is thus identical.
*
*@param n is the maximum number of records desired.
*@param currentTime is the current time.
*@return the array of document descriptions to expire.
*/
public DocumentSetAndFlags getExpiredDocuments(int n, long currentTime)
throws ManifoldCFException
{
// Screening query
// Moved outside of transaction, so there's less chance of keeping jobstatus cache key tied up
// for an extended period of time.
if (!jobs.activeJobsPresent())
return new DocumentSetAndFlags(new DocumentDescription[0], new boolean[0]);
long startTime = 0L;
if (Logging.perf.isDebugEnabled())
{
startTime = System.currentTimeMillis();
Logging.perf.debug("Beginning query to look for documents to expire");
}
// Put together a query with a limit of n
// Note well: This query does not do "FOR UPDATE". The reason is that only one thread can possibly change the document's state to active.
// If FOR UPDATE was included, deadlock conditions would be common because of the complexity of this query.
ArrayList list = new ArrayList();
StringBuilder sb = new StringBuilder("SELECT ");
sb.append("t0.").append(jobQueue.idField).append(",")
.append("t0.").append(jobQueue.jobIDField).append(",")
.append("t0.").append(jobQueue.docHashField).append(",")
.append("t0.").append(jobQueue.docIDField).append(",")
.append("t0.").append(jobQueue.statusField).append(",")
.append("t0.").append(jobQueue.failTimeField).append(",")
.append("t0.").append(jobQueue.failCountField)
.append(" FROM ").append(jobQueue.getTableName()).append(" t0 WHERE ")
.append(database.buildConjunctionClause(list,new ClauseDescription[]{
new MultiClause("t0."+jobQueue.statusField,new Object[]{
jobQueue.statusToString(JobQueue.STATUS_PENDING),
jobQueue.statusToString(JobQueue.STATUS_PENDINGPURGATORY)}),
new UnitaryClause("t0."+jobQueue.checkActionField,jobQueue.actionToString(JobQueue.ACTION_REMOVE)),
new UnitaryClause("t0."+jobQueue.checkTimeField,"<=",new Long(currentTime))})).append(" AND ");
sb.append("EXISTS(SELECT 'x' FROM ").append(jobs.getTableName()).append(" t1 WHERE ")
.append(database.buildConjunctionClause(list,new ClauseDescription[]{
new MultiClause("t1."+jobs.statusField,new Object[]{
jobs.statusToString(jobs.STATUS_ACTIVE),
jobs.statusToString(jobs.STATUS_ACTIVESEEDING)}),
new JoinClause("t1."+jobs.idField,"t0."+jobQueue.jobIDField)})).append(") AND ");
sb.append("NOT EXISTS(SELECT 'x' FROM ").append(jobQueue.getTableName()).append(" t2 WHERE ")
.append(database.buildConjunctionClause(list,new ClauseDescription[]{
new JoinClause("t2."+jobQueue.docHashField,"t0."+jobQueue.docHashField)})).append(" AND ")
.append("t2.").append(jobQueue.statusField).append(" IN (?,?,?,?,?,?)").append(" AND ")
.append("t2.").append(jobQueue.jobIDField).append("!=t0.").append(jobQueue.jobIDField).append(") ");
list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVE));
list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVEPURGATORY));
list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVENEEDRESCAN));
list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVENEEDRESCANPURGATORY));
list.add(jobQueue.statusToString(jobQueue.STATUS_BEINGDELETED));
list.add(jobQueue.statusToString(jobQueue.STATUS_BEINGCLEANED));
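// The NOT EXISTS clause above keeps us from expiring a document whose hash is simultaneously
// active, being deleted, or being cleaned under a different job.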
sb.append(database.constructOffsetLimitClause(0,n));
String query = sb.toString();
// Analyze jobqueue tables unconditionally, since it's become much more sensitive in 8.3 than it used to be.
//jobQueue.unconditionallyAnalyzeTables();
int repeatCount = 0;
while (true)
{
long sleepAmt = 0L;
// Enter a write lock, so only one thread can be doing this. That makes FOR UPDATE unnecessary.
lockManager.enterWriteLock(expireStufferLock);
try
{
if (Logging.perf.isDebugEnabled())
{
repeatCount++;
Logging.perf.debug(" Attempt "+Integer.toString(repeatCount)+" to expire documents, after "+
new Long(System.currentTimeMillis() - startTime)+" ms");
}
database.beginTransaction();
try
{
IResultSet set = database.performQuery(query,list,null,null,n,null);
if (Logging.perf.isDebugEnabled())
Logging.perf.debug(" Expiring "+Integer.toString(set.getRowCount())+" documents");
// To avoid deadlock, we want to update the document id hashes in order. This means reading into a structure I can sort by docid hash,
// before updating any rows in jobqueue.
HashMap connectionNameMap = new HashMap();
HashMap documentIDMap = new HashMap();
Map statusMap = new HashMap();
int i = 0;
while (i < set.getRowCount())
{
IResultRow row = set.getRow(i);
Long jobID = (Long)row.getValue(jobQueue.jobIDField);
String documentIDHash = (String)row.getValue(jobQueue.docHashField);
String documentID = (String)row.getValue(jobQueue.docIDField);
int status = jobQueue.stringToStatus(row.getValue(jobQueue.statusField).toString());
Long failTimeValue = (Long)row.getValue(jobQueue.failTimeField);
Long failCountValue = (Long)row.getValue(jobQueue.failCountField);
// Failtime is probably not useful in this context, but we'll bring it along for completeness
long failTime;
if (failTimeValue == null)
failTime = -1L;
else
failTime = failTimeValue.longValue();
int failCount;
if (failCountValue == null)
failCount = 0;
else
failCount = (int)failCountValue.longValue();
IJobDescription jobDesc = load(jobID);
String connectionName = jobDesc.getConnectionName();
String outputConnectionName = jobDesc.getOutputConnectionName();
DocumentDescription dd = new DocumentDescription((Long)row.getValue(jobQueue.idField),
jobID,documentIDHash,documentID,failTime,failCount);
String compositeDocumentID = makeCompositeID(documentIDHash,connectionName);
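// The composite ID scopes the document hash to its connection, so identical hashes arising
// under different connections cannot collide in the maps below.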
documentIDMap.put(compositeDocumentID,dd);
statusMap.put(compositeDocumentID,new Integer(status));
Map y = (Map)connectionNameMap.get(connectionName);
if (y == null)
{
y = new HashMap();
connectionNameMap.put(connectionName,y);
}
ArrayList x = (ArrayList)y.get(outputConnectionName);
if (x == null)
{
// New entry needed
x = new ArrayList();
y.put(outputConnectionName,x);
}
x.add(dd);
i++;
}
// For each bin, obtain a filtered answer, and enter all answers into a hash table.
// We'll then scan the result again to look up the right descriptions for return,
// and screen out the ones that are multiply owned, since those cannot be removed from the index.
HashMap allowedDocIds = new HashMap();
Iterator iter = connectionNameMap.keySet().iterator();
while (iter.hasNext())
{
String connectionName = (String)iter.next();
Map y = (Map)connectionNameMap.get(connectionName);
Iterator outputIter = y.keySet().iterator();
while (outputIter.hasNext())
{
String outputConnectionName = (String)outputIter.next();
ArrayList x = (ArrayList)y.get(outputConnectionName);
// Do the filter query
DocumentDescription[] descriptions = new DocumentDescription[x.size()];
int j = 0;
while (j < descriptions.length)
{
descriptions[j] = (DocumentDescription)x.get(j);
j++;
}
String[] docIDHashes = getUnindexableDocumentIdentifiers(descriptions,connectionName,outputConnectionName);
j = 0;
while (j < docIDHashes.length)
{
String docIDHash = docIDHashes[j++];
String key = makeCompositeID(docIDHash,connectionName);
allowedDocIds.put(key,docIDHash);
}
}
}
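// At this point allowedDocIds marks the documents that we are permitted to delete from the
// index; everything else must expire from the queue while leaving the index alone.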
// Now, assemble a result, and change the state of the records accordingly
// First thing to do is order by document hash, so we reduce the risk of deadlock.
String[] compositeIDArray = new String[documentIDMap.size()];
i = 0;
iter = documentIDMap.keySet().iterator();
while (iter.hasNext())
{
compositeIDArray[i++] = (String)iter.next();
}
java.util.Arrays.sort(compositeIDArray);
DocumentDescription[] rval = new DocumentDescription[documentIDMap.size()];
boolean[] rvalBoolean = new boolean[documentIDMap.size()];
i = 0;
while (i < compositeIDArray.length)
{
String compositeDocID = compositeIDArray[i];
DocumentDescription dd = (DocumentDescription)documentIDMap.get(compositeDocID);
// Determine whether we can delete it from the index or not
rvalBoolean[i] = (allowedDocIds.get(compositeDocID) != null);
// Mark the record as active ("being processed") and return it
rval[i++] = dd;
jobQueue.updateActiveRecord(dd.getID(),((Integer)statusMap.get(compositeDocID)).intValue());
}
TrackerClass.notePrecommit();
database.performCommit();
TrackerClass.noteCommit();
return new DocumentSetAndFlags(rval, rvalBoolean);
}
catch (ManifoldCFException e)
{
database.signalRollback();
TrackerClass.noteRollback();
if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT)
{
if (Logging.perf.isDebugEnabled())
Logging.perf.debug("Aborted transaction finding docs to expire: "+e.getMessage());
sleepAmt = getRandomAmount();
continue;
}
throw e;
}
catch (Error e)
{
database.signalRollback();
TrackerClass.noteRollback();
throw e;
}
finally
{
database.endTransaction();
}
}
finally
{
lockManager.leaveWriteLock(expireStufferLock);
sleepFor(sleepAmt);
}
}
}
// This method supports the "queue stuffer" thread
/** Get up to the next n document(s) to be fetched and processed.
* This fetch returns records that contain the document identifier, plus all instructions
* pertaining to the document's handling (e.g. whether it should be refetched if the version
* has not changed).
* This method also marks the documents whose descriptions have been returned as "being processed".
*@param n is the maximum number of records desired.
*@param currentTime is the current time; some fetches do not occur until a specific time.
*@param interval is the number of milliseconds that this set of documents should represent (for throttling).
*@param blockingDocuments is the place to record documents that were encountered, are eligible for reprioritization,
* but could not be queued due to throttling considerations.
*@param statistics are the current performance statistics per connection, which are used to balance the queue stuffing
* so that individual connections are not overwhelmed.
*@param scanRecord retains the bins from all documents encountered in the query, even those that were skipped due
* to being overcommitted.
*@return the array of document descriptions to fetch and process.
*/
public DocumentDescription[] getNextDocuments(int n, long currentTime, long interval,
BlockingDocuments blockingDocuments, PerformanceStatistics statistics,
DepthStatistics scanRecord)
throws ManifoldCFException
{
// NOTE WELL: Jobs that are throttled must control the number of documents that are fetched in
// a given interval. Therefore, the returned result has the following constraints on it:
// 1) There must be no more than n documents returned total;
// 2) For any given job that is throttled, the total number of documents returned must be
// consistent with the time interval provided.
// In general, this requires the database layer to perform fairly advanced filtering on the
// result, far in excess of a simple count. An implementation of an interface is therefore
// going to need to be passed into the performQuery() operation, which prunes the resultset
// as it is being read into memory. That's a new feature that will need to be added to the
// database layer.
// Screening query
// Moved outside of transaction, so there's less chance of keeping jobstatus cache key tied up
// for an extended period of time.
if (!jobs.activeJobsPresent())
return new DocumentDescription[0];
long startTime = 0L;
if (Logging.perf.isDebugEnabled())
{
startTime = System.currentTimeMillis();
Logging.perf.debug("Waiting to find documents to queue");
}
// Below there used to be one large transaction, with multiple read sections and multiple write sections.
// As part of reducing the chance of postgresql encountering deadlock conditions, I wanted to break this
// transaction up. However, the transaction depended for its correctness in throttling on making sure
// that the throttles that were built were based on the same active jobs that the subsequent queries
// that did the stuffing relied upon. This made reorganization impossible until I realized that, with
// Postgresql's transaction isolation, such divergence was going to happen anyway, so I needed a more
// robust solution.
//
// Specifically, I chose to change the way documents were queued so that only documents from properly
// throttled jobs could be queued. That meant I needed to add stuff to the ThrottleLimit class to track
// the very knowledge of an active job. This had the additional benefit of meaning there was no chance of
// a query occurring from inside a resultset filter.
//
// But, after I did this, it was no longer necessary to have such a large transaction either.
// Anything older than 10 minutes ago is considered eligible for reprioritization.
long prioritizationTime = currentTime - 60000L * 10L;
ThrottleLimit vList = new ThrottleLimit(n,prioritizationTime);
IResultSet jobconnections = jobs.getActiveJobConnections();
HashMap connectionSet = new HashMap();
int i = 0;
while (i < jobconnections.getRowCount())
{
IResultRow row = jobconnections.getRow(i++);
Long jobid = (Long)row.getValue("jobid");
String connectionName = (String)row.getValue("connectionname");
vList.addJob(jobid,connectionName);
connectionSet.put(connectionName,connectionName);
}
// Find the active connection names. We'll load these, and then get throttling info
// from each one.
String[] activeConnectionNames = new String[connectionSet.size()];
Iterator iter = connectionSet.keySet().iterator();
i = 0;
while (iter.hasNext())
{
activeConnectionNames[i++] = (String)iter.next();
}
IRepositoryConnection[] connections = connectionMgr.loadMultiple(activeConnectionNames);
// Accumulate a sum of the max_connection_count * avg_connection_rate values, so we can calculate the appropriate adjustment
// factor and set the connection limits.
HashMap rawFetchCounts = new HashMap();
double rawFetchCountTotal = 0.0;
i = 0;
while (i < connections.length)
{
IRepositoryConnection connection = connections[i++];
String connectionName = connection.getName();
int maxConnections = connection.getMaxConnections();
double avgFetchRate = statistics.calculateConnectionFetchRate(connectionName);
double weightedRawFetchCount = avgFetchRate * (double)maxConnections;
// Keep the avg rate for later use, since it may get updated before next time we need it.
rawFetchCounts.put(connectionName,new Double(weightedRawFetchCount));
rawFetchCountTotal += weightedRawFetchCount;
}
// Calculate an adjustment factor
double fetchCountAdjustmentFactor = ((double)n) / rawFetchCountTotal;
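// Illustration: with two connections whose weighted raw fetch counts are 30.0 and 10.0, and n = 20,
// the factor is 20/40 = 0.5, yielding adjusted counts of 15.0 and 5.0 before the floor applied below.
// Note that this division assumes rawFetchCountTotal is nonzero.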
// For each job, we must amortize the maximum number of fetches per ms to the actual interval,
// and also randomly select an extra fetch based on the fractional probability. (This latter is
// necessary for the case where the maximum fetch rate is specified to be pretty low.)
//
i = 0;
while (i < connections.length)
{
IRepositoryConnection connection = connections[i++];
String connectionName = connection.getName();
// Check if throttled...
String[] throttles = connection.getThrottles();
int k = 0;
while (k < throttles.length)
{
// The key is the regexp value itself
String throttle = throttles[k++];
float throttleValue = connection.getThrottleValue(throttle);
// For the given connection, set the fetch limit per bin. This is calculated using the time interval
// and the desired fetch rate. The fractional remainder is used to conditionally provide an "extra fetch"
// on a weighted random basis.
//
// In the future, the connection may specify tuples which pair a regexp describing a set of bins against
// a fetch rate. In that case, each fetch rate would need to be turned into a precise maximum
// count.
double fetchesPerTimeInterval = (double)throttleValue * (double)interval;
// Actual amount will be the integer value of this, plus an additional 1 if the random number aligns
int fetches = (int)fetchesPerTimeInterval;
fetchesPerTimeInterval -= (double)fetches;
if (random.nextDouble() <= fetchesPerTimeInterval)
fetches++;
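// Illustration: a throttle of 0.000025 fetches/ms over a 60000 ms interval yields 1.5 fetches,
// so we take 1 fetch outright and add a second with probability 0.5.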
// Save the limit in the ThrottleLimit structure
vList.addLimit(connectionName,throttle,fetches);
}
// For the overall connection, we also have a limit which is based on the number of connections there are actually available.
Double weightedRawFetchCount = (Double)rawFetchCounts.get(connectionName);
double adjustedFetchCount = weightedRawFetchCount.doubleValue() * fetchCountAdjustmentFactor;
// Note well: Queuing starvation that results from there being very few available documents for high-priority connections is dealt with here by simply allowing
// the stuffer thread to keep queuing documents until there are enough. This will be pretty inefficient if there's an active connection that is fast and has lots
// of available connection handles while the bulk of the activity is on slow, handle-limited connections, but I honestly can't think of a better way at the moment.
// One good way to correct a bit for this problem is to set a higher document count floor for each connection - say 5 documents - so that we won't loop as much.
//
// Be off in the higher direction rather than the lower; this also prohibits zero values and sets a minimum.
int fetchCount = ((int)adjustedFetchCount) + 5;
vList.setConnectionLimit(connectionName,fetchCount);
}
if (Logging.perf.isDebugEnabled())
Logging.perf.debug("After "+new Long(System.currentTimeMillis()-startTime).toString()+" ms, beginning query to look for documents to queue");
// System.out.println("Done building throttle structure");
// Locate records.
// Note that we do NOT want to get everything there is to know about the job
// using this query, since the file specification may be large and expensive
// to parse. We will load a (cached) copy of the job description for that purpose.
//
// NOTE: This query deliberately excludes documents which may be being processed by another job.
// (It actually excludes a bit more than that, because the exact query is impossible to write given
// the fact that document id's cannot be compared.) These are documents where there is ANOTHER
// document entry with the same hash value, a different job id, and a status which is either "active",
// "activepurgatory", or "beingdeleted". (It does not check whether the jobs have the same connection or
// whether the document id's are in fact the same, and therefore may temporarily block legitimate document
// activity under rare circumstances.)
//
// The query I want is:
// SELECT jobid,docid,status FROM jobqueue t0 WHERE status IN ('P','G') AND checktime <=xxx
// AND EXISTS(SELECT 'x' FROM
// jobs t1 WHERE t0.jobid=t1.id AND t1.status='A')
// AND NOT EXISTS(SELECT 'x' FROM jobqueue t2 WHERE t0.hashval=t2.hashval AND t0.jobid!=t2.jobid
// AND t2.status IN ('A','F','D'))
// ORDER BY docpriority ASC LIMIT xxx
//
// NOTE WELL: The above query did just fine until adaptive recrawling was seriously tried. Then, because every
// document in a job was still active, it failed miserably, actually causing Postgresql to stop responding at
// one point. Why? Well, the key thing is the sort criteria - there just isn't any way to sort 1M documents
// without working with a monster resultset.
//
// I introduced a new index as a result - based solely on docpriority - and postgresql now correctly uses that index
// to pull its results in an ordered fashion.
//
// Another subtlety is that I *must* mark the documents active as I find them, so that they do not
// have any chance of getting returned twice.
// Accumulate the answers here
ArrayList answers = new ArrayList();
// The current time value
Long currentTimeValue = new Long(currentTime);
// Always analyze jobqueue before this query. Otherwise stuffing may get a bad plan, interfering with performance.
// This turned out to be needed in postgresql 8.3, even though 8.2 worked fine.
//jobQueue.unconditionallyAnalyzeTables();
// Loop through priority values
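// Job priority values run from 1 to 10; we stuff documents for each priority in ascending order,
// stopping early once the throttle limits report that nothing more can be accepted.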
int currentPriority = 1;
boolean isDone = false;
while (!isDone && currentPriority <= 10)
{
if (jobs.hasPriorityJobs(currentPriority))
{
Long currentPriorityValue = new Long((long)currentPriority);
fetchAndProcessDocuments(answers,currentTimeValue,currentPriorityValue,vList,connections);
isDone = !vList.checkContinue();
}
currentPriority++;
}
// Assert the blocking documents we discovered
vList.tallyBlockingDocuments(blockingDocuments);
// Convert the saved answers to an array
DocumentDescription[] rval = new DocumentDescription[answers.size()];
i = 0;
while (i < rval.length)
{
rval[i] = (DocumentDescription)answers.get(i);
i++;
}
// After we're done pulling stuff from the queue, find the eligible row with the best priority on the queue, and save the bins for assessment.
// This is done to decide what the "floor" bincount should be - the idea being that it is wrong to assign priorities for new documents which are
// higher than the current level that is currently being dequeued.
//
// The complicating factor here is that there are indeed many potential *classes* of documents, each of which might have its own current
// document priority level. For example, documents could be classed by job, which might make sense because there is a possibility that two jobs'
// job priorities may differ. Also, because of document fetch scheduling, each time frame may represent a class in its own right as well.
// These classes would have to be associated with independent bin counts, if we were to make any use of them. Then, it would be also necessary
// to know what classes a document belonged to in order to be able to calculate its priority.
//
// An alternative way to proceed is to just have ONE class, and document priorities then get assigned without regard to job, queuing time, etc.
// That's the current reality. The code below works in that model, knowing full well that it is an approximation to an ideal.
// Find the one row from a live job that has the best document priority, which is available within the current time window.
// Note that if there is NO such document, it means we were able to queue all eligible documents, and thus prioritization is probably not even
// germane at the moment.
StringBuilder sb = new StringBuilder("SELECT ");
ArrayList list = new ArrayList();
sb.append(jobQueue.docPriorityField).append(",").append(jobQueue.jobIDField).append(",")
.append(jobQueue.docHashField).append(",").append(jobQueue.docIDField)
.append(" FROM ").append(jobQueue.getTableName())
.append(" t0 ").append(jobQueue.getGetNextDocumentsIndexHint()).append(" WHERE ");
sb.append(database.buildConjunctionClause(list,new ClauseDescription[]{
//new UnitaryClause(jobQueue.docPriorityField,">=",new Long(0L)),
new MultiClause(jobQueue.statusField,
new Object[]{jobQueue.statusToString(JobQueue.STATUS_PENDING),
jobQueue.statusToString(JobQueue.STATUS_PENDINGPURGATORY)}),
new UnitaryClause(jobQueue.checkActionField,"=",jobQueue.actionToString(JobQueue.ACTION_RESCAN)),
new UnitaryClause(jobQueue.checkTimeField,"<=",currentTimeValue)})).append(" AND ");
sb.append("EXISTS(SELECT 'x' FROM ").append(jobs.getTableName()).append(" t1 WHERE ")
.append(database.buildConjunctionClause(list,new ClauseDescription[]{
new MultiClause("t1."+jobs.statusField,new Object[]{
jobs.statusToString(jobs.STATUS_ACTIVE),
jobs.statusToString(jobs.STATUS_ACTIVESEEDING)}),
new JoinClause("t1."+jobs.idField,"t0."+jobQueue.jobIDField)}))
.append(") ");
sb.append(" ").append(database.constructIndexOrderByClause(new String[]{
jobQueue.docPriorityField, jobQueue.statusField, jobQueue.checkActionField, jobQueue.checkTimeField},
true)).append(" ")
.append(database.constructOffsetLimitClause(0,1,true));
IResultSet set = database.performQuery(sb.toString(),list,null,null,1,null);
if (set.getRowCount() > 0)
{
IResultRow row = set.getRow(0);
Double docPriority = (Double)row.getValue(jobQueue.docPriorityField);
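// noDocPriorityValue acts here as a sentinel meaning "no priority assigned"; only rows carrying
// a real priority feed the bin assessment.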
if (docPriority != null && docPriority.doubleValue() < jobQueue.noDocPriorityValue)
scanRecord.addBins(docPriority);
}
return rval;
}
/** Fetch and process documents matching the passed-in criteria */
protected void fetchAndProcessDocuments(ArrayList answers, Long currentTimeValue, Long currentPriorityValue,
ThrottleLimit vList, IRepositoryConnection[] connections)
throws ManifoldCFException
{
// Note well: This query does not do "FOR UPDATE". The reason is that only one thread can possibly change the document's state to active.
// When FOR UPDATE was included, deadlock conditions were common because of the complexity of this query.
// So, instead, as part of CONNECTORS-781, I've introduced a write lock for the pertinent section.
ArrayList list = new ArrayList();
StringBuilder sb = new StringBuilder("SELECT t0.");
sb.append(jobQueue.idField).append(",t0.");
if (Logging.scheduling.isDebugEnabled())
sb.append(jobQueue.docPriorityField).append(",t0.");
sb.append(jobQueue.jobIDField).append(",t0.")
.append(jobQueue.docHashField).append(",t0.")
.append(jobQueue.docIDField).append(",t0.")
.append(jobQueue.statusField).append(",t0.")
.append(jobQueue.failTimeField).append(",t0.")
.append(jobQueue.failCountField).append(",t0.")
.append(jobQueue.prioritySetField).append(" FROM ").append(jobQueue.getTableName())
.append(" t0 ").append(jobQueue.getGetNextDocumentsIndexHint()).append(" WHERE ");
sb.append(database.buildConjunctionClause(list,new ClauseDescription[]{
//new UnitaryClause("t0."+jobQueue.docPriorityField,">=",new Long(0L)),
new MultiClause("t0."+jobQueue.statusField,new Object[]{
jobQueue.statusToString(JobQueue.STATUS_PENDING),
jobQueue.statusToString(JobQueue.STATUS_PENDINGPURGATORY)}),
new UnitaryClause("t0."+jobQueue.checkActionField,"=",jobQueue.actionToString(JobQueue.ACTION_RESCAN)),
new UnitaryClause("t0."+jobQueue.checkTimeField,"<=",currentTimeValue)})).append(" AND ");
sb.append("EXISTS(SELECT 'x' FROM ").append(jobs.getTableName()).append(" t1 WHERE ")
.append(database.buildConjunctionClause(list,new ClauseDescription[]{
new MultiClause("t1."+jobs.statusField,new Object[]{
jobs.statusToString(jobs.STATUS_ACTIVE),
jobs.statusToString(jobs.STATUS_ACTIVESEEDING)}),
new JoinClause("t1."+jobs.idField,"t0."+jobQueue.jobIDField),
new UnitaryClause("t1."+jobs.priorityField,currentPriorityValue)}))
.append(") AND ");
sb.append("NOT EXISTS(SELECT 'x' FROM ").append(jobQueue.getTableName()).append(" t2 WHERE ")
.append(database.buildConjunctionClause(list,new ClauseDescription[]{
new JoinClause("t2."+jobQueue.docHashField,"t0."+jobQueue.docHashField)})).append(" AND ")
.append("t2.").append(jobQueue.statusField).append(" IN (?,?,?,?,?,?) AND ")
.append("t2.").append(jobQueue.jobIDField).append("!=t0.").append(jobQueue.jobIDField)
.append(") AND ");
list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVE));
list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVEPURGATORY));
list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVENEEDRESCAN));
list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVENEEDRESCANPURGATORY));
list.add(jobQueue.statusToString(jobQueue.STATUS_BEINGDELETED));
list.add(jobQueue.statusToString(jobQueue.STATUS_BEINGCLEANED));
// Prerequisite event clause: AND NOT EXISTS(SELECT 'x' FROM prereqevents t3,events t4 WHERE t3.ownerid=t0.id AND t3.name=t4.name)
sb.append("NOT EXISTS(SELECT 'x' FROM ").append(jobQueue.prereqEventManager.getTableName()).append(" t3,").append(eventManager.getTableName()).append(" t4 WHERE t0.")
.append(jobQueue.idField).append("=t3.").append(jobQueue.prereqEventManager.ownerField).append(" AND t3.")
.append(jobQueue.prereqEventManager.eventNameField).append("=t4.").append(eventManager.eventNameField)
.append(")");
sb.append(" ").append(database.constructIndexOrderByClause(new String[]{
"t0."+jobQueue.docPriorityField, "t0."+jobQueue.statusField, "t0."+jobQueue.checkActionField, "t0."+jobQueue.checkTimeField},
true)).append(" ");
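// The ORDER BY lists (docpriority, status, checkaction, checktime), presumably matching the index
// hinted above, so the database can stream rows in index order rather than sorting the whole candidate set.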
// Before entering the transaction, we must provide the throttlelimit object with all the connector
// instances it could possibly need. The purpose for doing this is to prevent a deadlock where
// connector starvation causes database lockup.
//
// The preallocation of multiple connector instances is certainly a worry. If any other part
// of the code allocates multiple connector instances also, the potential exists for this to cause
// deadlock all by itself. I've therefore built a "grab multiple" and a "release multiple"
// at the connector factory level to make sure these requests are properly ordered.
String[] orderingKeys = new String[connections.length];
String[] classNames = new String[connections.length];
ConfigParams[] configParams = new ConfigParams[connections.length];
int[] maxConnections = new int[connections.length];
int k = 0;
while (k < connections.length)
{
IRepositoryConnection connection = connections[k];
orderingKeys[k] = connection.getName();
classNames[k] = connection.getClassName();
configParams[k] = connection.getConfigParams();
maxConnections[k] = connection.getMaxConnections();
k++;
}
// Never sleep with a resource locked!
while (true)
{
long sleepAmt = 0L;
// Write lock insures that only one thread cluster-wide can be doing this at a given time, so FOR UPDATE is unneeded.
lockManager.enterWriteLock(stufferLock);
try
{
IRepositoryConnector[] connectors = RepositoryConnectorFactory.grabMultiple(threadContext,orderingKeys,classNames,configParams,maxConnections);
try
{
// Hand the connectors off to the ThrottleLimit instance
k = 0;
while (k < connections.length)
{
vList.addConnectionName(connections[k].getName(),connectors[k]);
k++;
}
// Now we can tack the limit onto the query. Before this point, remainingDocuments would be meaningless.
int limitValue = vList.getRemainingDocuments();
sb.append(database.constructOffsetLimitClause(0,limitValue,true));
if (Logging.perf.isDebugEnabled())
{
Logging.perf.debug("Queuing documents from time "+currentTimeValue.toString()+" job priority "+currentPriorityValue.toString()+
" (up to "+Integer.toString(vList.getRemainingDocuments())+" documents)");
}
database.beginTransaction();
try
{
IResultSet set = database.performQuery(sb.toString(),list,null,null,-1,vList);
if (Logging.perf.isDebugEnabled())
Logging.perf.debug(" Queuing "+Integer.toString(set.getRowCount())+" documents");
// To avoid deadlock, we want to update the document id hashes in order. This means reading into a structure I can sort by docid hash,
// before updating any rows in jobqueue.
String[] docIDHashes = new String[set.getRowCount()];
Map storageMap = new HashMap();
Map statusMap = new HashMap();
int i = 0;
while (i < set.getRowCount())
{
IResultRow row = set.getRow(i);
Long id = (Long)row.getValue(jobQueue.idField);
Long jobID = (Long)row.getValue(jobQueue.jobIDField);
String docIDHash = (String)row.getValue(jobQueue.docHashField);
String docID = (String)row.getValue(jobQueue.docIDField);
int status = jobQueue.stringToStatus(row.getValue(jobQueue.statusField).toString());
Long failTimeValue = (Long)row.getValue(jobQueue.failTimeField);
Long failCountValue = (Long)row.getValue(jobQueue.failCountField);
long failTime;
if (failTimeValue == null)
failTime = -1L;
else
failTime = failTimeValue.longValue();
int failCount;
if (failCountValue == null)
failCount = -1;
else
failCount = (int)failCountValue.longValue();
DocumentDescription dd = new DocumentDescription(id,jobID,docIDHash,docID,failTime,failCount);
docIDHashes[i] = docIDHash + ":" + jobID;
storageMap.put(docIDHashes[i],dd);
statusMap.put(docIDHashes[i],new Integer(status));
if (Logging.scheduling.isDebugEnabled())
{
Double docPriority = (Double)row.getValue(jobQueue.docPriorityField);
Logging.scheduling.debug("Stuffing document '"+docID+"' that has priority "+docPriority.toString()+" onto active list");
}
i++;
}
// No duplicates are possible here
java.util.Arrays.sort(docIDHashes);
i = 0;
while (i < docIDHashes.length)
{
String docIDHash = docIDHashes[i];
DocumentDescription dd = (DocumentDescription)storageMap.get(docIDHash);
Long id = dd.getID();
int status = ((Integer)statusMap.get(docIDHash)).intValue();
// Set status to "ACTIVE".
jobQueue.updateActiveRecord(id,status);
answers.add(dd);
i++;
}
TrackerClass.notePrecommit();
database.performCommit();
TrackerClass.noteCommit();
break;
}
catch (ManifoldCFException e)
{
database.signalRollback();
if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT)
{
if (Logging.perf.isDebugEnabled())
Logging.perf.debug("Aborted transaction finding docs to queue: "+e.getMessage());
sleepAmt = getRandomAmount();
continue;
}
throw e;
}
catch (Error e)
{
database.signalRollback();
throw e;
}
finally
{
database.endTransaction();
}
}
finally
{
RepositoryConnectorFactory.releaseMultiple(connectors);
}
}
finally
{
lockManager.leaveWriteLock(stufferLock);
sleepFor(sleepAmt);
}
}
}
// These methods support the individual fetch/process threads.
/** Verify that a specific job is indeed still active. This is used to permit abort or pause to be relatively speedy.
* The query done within MUST be cached in order to not cause undue performance degradation.
*@param jobID is the job identifier.
*@return true if the job is in one of the "active" states.
*/
public boolean checkJobActive(Long jobID)
throws ManifoldCFException
{
return jobs.checkJobActive(jobID);
}
/** Check whether a job is still processing documents, or no longer has any outstanding active documents */
public boolean checkJobBusy(Long jobID)
throws ManifoldCFException
{
return jobQueue.checkJobBusy(jobID);
}
/** Note completion of document processing by a job thread.
* This method causes the state of the document to be marked as "completed".
*@param documentDescriptions are the description objects for the documents that were processed.
*/
public void markDocumentCompletedMultiple(DocumentDescription[] documentDescriptions)
throws ManifoldCFException
{
// Before we can change a document status, we need to know the *current* status. Therefore, a SELECT xxx FOR UPDATE/UPDATE
// transaction is needed in order to complete these documents correctly.
//
// Since we are therefore setting row locks on the jobqueue table, we need to work to avoid unnecessary deadlocking. To do that, we have to
// lock rows in document id hash order!! Luckily, the DocumentDescription objects have a document identifier buried within, which we can use to
// order the "select for update" operations appropriately.
//
HashMap indexMap = new HashMap();
String[] docIDHashes = new String[documentDescriptions.length];
int i = 0;
while (i < documentDescriptions.length)
{
String documentIDHash = documentDescriptions[i].getDocumentIdentifierHash() + ":" + documentDescriptions[i].getJobID();
docIDHashes[i] = documentIDHash;
indexMap.put(documentIDHash,new Integer(i));
i++;
}
java.util.Arrays.sort(docIDHashes);
// Retry loop - in case we get a deadlock despite our best efforts
while (true)
{
long sleepAmt = 0L;
// Start the transaction now
database.beginTransaction();
try
{
// Do one row at a time, to avoid deadlocking things
i = 0;
while (i < docIDHashes.length)
{
String docIDHash = docIDHashes[i];
// Get the DocumentDescription object
DocumentDescription dd = documentDescriptions[((Integer)indexMap.get(docIDHash)).intValue()];
// Query for the status
ArrayList list = new ArrayList();
String query = database.buildConjunctionClause(list,new ClauseDescription[]{
new UnitaryClause(jobQueue.idField,dd.getID())});
TrackerClass.notePreread(dd.getID());
IResultSet set = database.performQuery("SELECT "+jobQueue.statusField+" FROM "+jobQueue.getTableName()+" WHERE "+
query+" FOR UPDATE",list,null,null);
TrackerClass.noteRead(dd.getID());
if (set.getRowCount() > 0)
{
IResultRow row = set.getRow(0);
// Grab the status
int status = jobQueue.stringToStatus((String)row.getValue(jobQueue.statusField));
// Update the jobqueue table
jobQueue.updateCompletedRecord(dd.getID(),status);
}
i++;
}
TrackerClass.notePrecommit();
database.performCommit();
TrackerClass.noteCommit();
break;
}
catch (ManifoldCFException e)
{
database.signalRollback();
TrackerClass.noteRollback();
if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT)
{
if (Logging.perf.isDebugEnabled())
Logging.perf.debug("Aborted transaction marking completed "+Integer.toString(docIDHashes.length)+
" docs: "+e.getMessage());
sleepAmt = getRandomAmount();
continue;
}
throw e;
}
catch (Error e)
{
database.signalRollback();
TrackerClass.noteRollback();
throw e;
}
finally
{
database.endTransaction();
sleepFor(sleepAmt);
}
}
}
/** Note completion of document processing by a job thread.
* This method causes the state of the document to be marked as "completed".
*@param documentDescription is the description object for the document that was processed.
*/
public void markDocumentCompleted(DocumentDescription documentDescription)
throws ManifoldCFException
{
markDocumentCompletedMultiple(new DocumentDescription[]{documentDescription});
}
/** Delete from queue as a result of processing of an active document.
* The document is expected to be in one of the active states: ACTIVE, ACTIVESEEDING,
* ACTIVENEEDSRESCAN, ACTIVESEEDINGNEEDSRESCAN. The RESCAN variants are interpreted
* as meaning that the document should not be deleted, but should instead be popped back on the queue for
* a repeat processing attempt.
*@param documentDescriptions are the set of description objects for the documents that were processed.
*@param hopcountMethod describes how to handle deletions for hopcount purposes.
*@return the set of documents for which carrydown data was changed by this operation. These documents are likely
* to be requeued as a result of the change.
*/
public DocumentDescription[] markDocumentDeletedMultiple(Long jobID, String[] legalLinkTypes, DocumentDescription[] documentDescriptions,
int hopcountMethod)
throws ManifoldCFException
{
// It's no longer an issue to have to deal with documents being conditionally deleted; that's been
// taken over by the hopcountremoval method below. So just use the simple 'delete' functionality.
return doDeleteMultiple(jobID,legalLinkTypes,documentDescriptions,hopcountMethod);
}
/** Delete from queue as a result of processing of an active document.
* The document is expected to be in one of the active states: ACTIVE, ACTIVESEEDING,
* ACTIVENEEDSRESCAN, ACTIVESEEDINGNEEDSRESCAN. The RESCAN variants are interpreted
* as meaning that the document should not be deleted, but should instead be popped back on the queue for
* a repeat processing attempt.
*@param documentDescription is the description object for the document that was processed.
*@param hopcountMethod describes how to handle deletions for hopcount purposes.
*@return the set of documents for which carrydown data was changed by this operation. These documents are likely
* to be requeued as a result of the change.
*/
public DocumentDescription[] markDocumentDeleted(Long jobID, String[] legalLinkTypes, DocumentDescription documentDescription,
int hopcountMethod)
throws ManifoldCFException
{
return markDocumentDeletedMultiple(jobID,legalLinkTypes,new DocumentDescription[]{documentDescription},hopcountMethod);
}
/** Mark hopcount removal from queue as a result of processing of an active document.
* The document is expected to be in one of the active states: ACTIVE, ACTIVESEEDING,
* ACTIVENEEDSRESCAN, ACTIVESEEDINGNEEDSRESCAN. The RESCAN variants are interpreted
* as meaning that the document should not be marked as removed, but should instead be popped back on the queue for
* a repeat processing attempt.
*@param documentDescriptions are the set of description objects for the documents that were processed.
*@param hopcountMethod describes how to handle deletions for hopcount purposes.
*@return the set of documents for which carrydown data was changed by this operation. These documents are likely
* to be requeued as a result of the change.
*/
public DocumentDescription[] markDocumentHopcountRemovalMultiple(Long jobID, String[] legalLinkTypes, DocumentDescription[] documentDescriptions,
int hopcountMethod)
throws ManifoldCFException
{
// For each record, we're going to have to choose between marking it as "hopcount removed", and marking
// it for rescan. So the basic flow will involve changing a document's status.
// Before we can change a document status, we need to know the *current* status. Therefore, a SELECT xxx FOR UPDATE/UPDATE
// transaction is needed in order to complete these documents correctly.
//
// Since we are therefore setting row locks on the jobqueue table, we need to work to avoid unnecessary deadlocking. To do that, we have to
// lock rows in document id hash order!! Luckily, the DocumentDescription objects have a document identifier buried within, which we can use to
// order the "select for update" operations appropriately.
//
HashMap indexMap = new HashMap();
String[] docIDHashes = new String[documentDescriptions.length];
int i = 0;
while (i < documentDescriptions.length)
{
String documentIDHash = documentDescriptions[i].getDocumentIdentifierHash() + ":" + documentDescriptions[i].getJobID();
docIDHashes[i] = documentIDHash;
indexMap.put(documentIDHash,new Integer(i));
i++;
}
java.util.Arrays.sort(docIDHashes);
// Retry loop - in case we get a deadlock despite our best efforts
while (true)
{
long sleepAmt = 0L;
database.beginTransaction(database.TRANSACTION_SERIALIZED);
try
{
// Do one row at a time, to avoid deadlocking things
List<String> deleteList = new ArrayList<String>();
i = 0;
while (i < docIDHashes.length)
{
String docIDHash = docIDHashes[i];
// Get the DocumentDescription object
DocumentDescription dd = documentDescriptions[((Integer)indexMap.get(docIDHash)).intValue()];
// Query for the status
ArrayList list = new ArrayList();
String query = database.buildConjunctionClause(list,new ClauseDescription[]{
new UnitaryClause(jobQueue.idField,dd.getID())});
TrackerClass.notePreread(dd.getID());
IResultSet set = database.performQuery("SELECT "+jobQueue.statusField+" FROM "+jobQueue.getTableName()+" WHERE "+
query+" FOR UPDATE",list,null,null);
TrackerClass.noteRead(dd.getID());
if (set.getRowCount() > 0)
{
IResultRow row = set.getRow(0);
// Grab the status
int status = jobQueue.stringToStatus((String)row.getValue(jobQueue.statusField));
// Update the jobqueue table
boolean didDelete = jobQueue.updateOrHopcountRemoveRecord(dd.getID(),status);
if (didDelete)
{
deleteList.add(dd.getDocumentIdentifierHash());
}
}
i++;
}
String[] docIDSimpleHashes = new String[deleteList.size()];
for (int j = 0; j < docIDSimpleHashes.length; j++)
{
docIDSimpleHashes[j] = deleteList.get(j);
}
// Next, find the documents that are affected by carrydown deletion.
DocumentDescription[] rval = calculateAffectedDeleteCarrydownChildren(jobID,docIDSimpleHashes);
// Since hopcount inheritance and prerequisites came from the addDocument() method,
// we don't delete them here.
TrackerClass.notePrecommit();
database.performCommit();
TrackerClass.noteCommit();
return rval;
}
catch (ManifoldCFException e)
{
database.signalRollback();
TrackerClass.noteRollback();
if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT)
{
if (Logging.perf.isDebugEnabled())
Logging.perf.debug("Aborted transaction marking completed "+Integer.toString(docIDHashes.length)+
" docs: "+e.getMessage());
sleepAmt = getRandomAmount();
continue;
}
throw e;
}
catch (Error e)
{
database.signalRollback();
TrackerClass.noteRollback();
throw e;
}
finally
{
database.endTransaction();
sleepFor(sleepAmt);
}
}
}
/** Mark hopcount removal from queue as a result of processing of an active document.
* The document is expected to be in one of the active states: ACTIVE, ACTIVESEEDING,
* ACTIVENEEDSRESCAN, ACTIVESEEDINGNEEDSRESCAN. The RESCAN variants are interpreted
* as meaning that the document should not be marked as removed, but should instead be popped back on the queue for
* a repeat processing attempt.
*@param documentDescription is the description object for the document that was processed.
*@param hopcountMethod describes how to handle deletions for hopcount purposes.
*@return the set of documents for which carrydown data was changed by this operation. These documents are likely
* to be requeued as a result of the change.
*/
public DocumentDescription[] markDocumentHopcountRemoval(Long jobID, String[] legalLinkTypes, DocumentDescription documentDescription,
int hopcountMethod)
throws ManifoldCFException
{
return markDocumentHopcountRemovalMultiple(jobID,legalLinkTypes,new DocumentDescription[]{documentDescription},hopcountMethod);
}
/** Delete from queue as a result of expiration of an active document.
* The document is expected to be in one of the active states: ACTIVE, ACTIVESEEDING,
* ACTIVENEEDSRESCAN, ACTIVESEEDINGNEEDSRESCAN. Since the document expired,
* no special activity takes place as a result of the document being in a RESCAN state.
*@param documentDescriptions are the set of description objects for the documents that were processed.
*@param hopcountMethod describes how to handle deletions for hopcount purposes.
*@return the set of documents for which carrydown data was changed by this operation. These documents are likely
* to be requeued as a result of the change.
*/
public DocumentDescription[] markDocumentExpiredMultiple(Long jobID, String[] legalLinkTypes, DocumentDescription[] documentDescriptions,
int hopcountMethod)
throws ManifoldCFException
{
return doDeleteMultiple(jobID,legalLinkTypes,documentDescriptions,hopcountMethod);
}
/** Delete from queue as a result of expiration of an active document.
* The document is expected to be in one of the active states: ACTIVE, ACTIVESEEDING,
* ACTIVENEEDSRESCAN, ACTIVESEEDINGNEEDSRESCAN. Since the document expired,
* no special activity takes place as a result of the document being in a RESCAN state.
*@param documentDescription is the description object for the document that was processed.
*@param hopcountMethod describes how to handle deletions for hopcount purposes.
*@return the set of documents for which carrydown data was changed by this operation. These documents are likely
* to be requeued as a result of the change.
*/
public DocumentDescription[] markDocumentExpired(Long jobID, String[] legalLinkTypes, DocumentDescription documentDescription,
int hopcountMethod)
throws ManifoldCFException
{
return markDocumentExpiredMultiple(jobID,legalLinkTypes,new DocumentDescription[]{documentDescription},hopcountMethod);
}
/** Delete from queue as a result of cleaning up an unreachable document.
* The document is expected to be in the PURGATORY state. There is never any need to reprocess the
* document.
*@param documentDescriptions are the set of description objects for the documents that were processed.
*@param hopcountMethod describes how to handle deletions for hopcount purposes.
*@return the set of documents for which carrydown data was changed by this operation. These documents are likely
* to be requeued as a result of the change.
*/
public DocumentDescription[] markDocumentCleanedUpMultiple(Long jobID, String[] legalLinkTypes, DocumentDescription[] documentDescriptions,
int hopcountMethod)
throws ManifoldCFException
{
return doDeleteMultiple(jobID,legalLinkTypes,documentDescriptions,hopcountMethod);
}
/** Delete from queue as a result of cleaning up an unreachable document.
* The document is expected to be in the PURGATORY state. There is never any need to reprocess the
* document.
*@param documentDescription is the description object for the document that was processed.
*@param hopcountMethod describes how to handle deletions for hopcount purposes.
*@return the set of documents for which carrydown data was changed by this operation. These documents are likely
* to be requeued as a result of the change.
*/
public DocumentDescription[] markDocumentCleanedUp(Long jobID, String[] legalLinkTypes, DocumentDescription documentDescription,
int hopcountMethod)
throws ManifoldCFException
{
return markDocumentCleanedUpMultiple(jobID,legalLinkTypes,new DocumentDescription[]{documentDescription},hopcountMethod);
}
/** Delete documents with no repercussions. We don't have to worry about the current state of each document,
* since the document is definitely going away.
*@param documentDescriptions are the set of description objects for the documents that were processed.
*@param hopcountMethod describes how to handle deletions for hopcount purposes.
*@return the set of documents for which carrydown data was changed by this operation. These documents are likely
* to be requeued as a result of the change.
*/
protected DocumentDescription[] doDeleteMultiple(Long jobID, String[] legalLinkTypes, DocumentDescription[] documentDescriptions,
int hopcountMethod)
throws ManifoldCFException
{
if (documentDescriptions.length == 0)
return new DocumentDescription[0];
// Order of locking is not normally important here, because documents that wind up being deleted are never being worked on by anything else.
// In all cases, the state of the document excludes other activity.
// The only tricky situation is when a thread is processing a document which happens to be getting deleted, while another thread is trying to add
// a reference for the very same document to the queue. Then, order of locking matters, so the deletions should happen in a specific order to avoid
// the possibility of deadlock. Nevertheless, this is enough of a risk that I've chosen to order the deletions by document id hash, just like everywhere
// else.
long startTime = 0L;
if (Logging.perf.isDebugEnabled())
{
startTime = System.currentTimeMillis();
Logging.perf.debug("Waiting to delete "+Integer.toString(documentDescriptions.length)+" docs and clean up hopcount for job "+jobID.toString());
}
HashMap indexMap = new HashMap();
String[] docIDHashes = new String[documentDescriptions.length];
int i = 0;
while (i < documentDescriptions.length)
{
docIDHashes[i] = documentDescriptions[i].getDocumentIdentifierHash() + ":" + documentDescriptions[i].getJobID();
indexMap.put(docIDHashes[i],new Integer(i));
i++;
}
// Sort by doc hash, to establish non-blocking lock order
java.util.Arrays.sort(docIDHashes);
DocumentDescription[] rval;
while (true)
{
long sleepAmt = 0L;
database.beginTransaction(database.TRANSACTION_SERIALIZED);
try
{
if (Logging.perf.isDebugEnabled())
Logging.perf.debug("Waited "+new Long(System.currentTimeMillis()-startTime).toString()+" ms to start deleting "+Integer.toString(docIDHashes.length)+
" docs and clean up hopcount for job "+jobID.toString());
String[] docIDSimpleHashes = new String[docIDHashes.length];
// Delete jobqueue rows FIRST. Even though we do this before assessing the carrydown implications, it is OK because it's the CHILDREN of these
// rows that might get affected by carrydown data deletion, not the rows themselves!
i = 0;
while (i < docIDHashes.length)
{
String docIDHash = docIDHashes[i];
DocumentDescription dd = documentDescriptions[((Integer)indexMap.get(docIDHash)).intValue()];
// Individual operations are necessary so order can be controlled.
jobQueue.deleteRecord(dd.getID());
docIDSimpleHashes[i] = dd.getDocumentIdentifierHash();
i++;
}
// Next, find the documents that are affected by carrydown deletion.
rval = calculateAffectedDeleteCarrydownChildren(jobID,docIDSimpleHashes);
// Finally, delete the carrydown records in question.
carryDown.deleteRecords(jobID,docIDSimpleHashes);
if (legalLinkTypes.length > 0)
hopCount.deleteDocumentIdentifiers(jobID,legalLinkTypes,docIDSimpleHashes,hopcountMethod);
database.performCommit();
if (Logging.perf.isDebugEnabled())
Logging.perf.debug("Took "+new Long(System.currentTimeMillis()-startTime).toString()+" ms to delete "+Integer.toString(docIDHashes.length)+
" docs and clean up hopcount for job "+jobID.toString());
break;
}
catch (ManifoldCFException e)
{
database.signalRollback();
if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT)
{
if (Logging.perf.isDebugEnabled())
Logging.perf.debug("Aborted transaction deleting "+Integer.toString(docIDHashes.length)+
" docs and clean up hopcount for job "+jobID.toString()+": "+e.getMessage());
sleepAmt = getRandomAmount();
continue;
}
throw e;
}
catch (Error e)
{
database.signalRollback();
throw e;
}
finally
{
database.endTransaction();
sleepFor(sleepAmt);
}
}
return rval;
}
/** Helper method: Find the document descriptions that will be affected due to carrydown row deletions.
*/
protected DocumentDescription[] calculateAffectedDeleteCarrydownChildren(Long jobID, String[] docIDHashes)
throws ManifoldCFException
{
// Break the request into pieces, as needed, and throw everything into a hash for uniqueness.
// We are going to need to break up this query into a number of subqueries, each covering a subset of parent id hashes.
// The goal is to throw all the children into a hash, to make them unique at the end.
HashMap resultHash = new HashMap();
ArrayList list = new ArrayList();
int maxCount = maxClauseProcessDeleteHashSet();
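// maxCount is the database layer's cap on how many parent hashes may appear in one clause; the
// loop below flushes a batch to processDeleteHashSet() whenever that cap is reached.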
int i = 0;
int z = 0;
while (i < docIDHashes.length)
{
if (z == maxCount)
{
processDeleteHashSet(jobID,resultHash,list);
list.clear();
z = 0;
}
list.add(docIDHashes[i]);
i++;
z++;
}
if (z > 0)
processDeleteHashSet(jobID,resultHash,list);
// Now, put together the result document list from the hash.
DocumentDescription[] rval = new DocumentDescription[resultHash.size()];
i = 0;
Iterator iter = resultHash.keySet().iterator();
while (iter.hasNext())
{
Long id = (Long)iter.next();
DocumentDescription dd = (DocumentDescription)resultHash.get(id);
rval[i++] = dd;
}
return rval;
}
/** Get the maximum number of document id hashes that can be handled in one processDeleteHashSet() query.
*/
protected int maxClauseProcessDeleteHashSet()
{
return database.findConjunctionClauseMax(new ClauseDescription[]{
new JoinClause("t1."+carryDown.jobIDField,"t0."+jobQueue.jobIDField),
new JoinClause("t1."+carryDown.childIDHashField,"t0."+jobQueue.docHashField)});
}
/** Helper method: look up rows affected by a deleteRecords operation.
*/
protected void processDeleteHashSet(Long jobID, HashMap resultHash, ArrayList list)
throws ManifoldCFException
{
// The query here mirrors the carrydown.restoreRecords() delete query! However, it also fetches enough information to build a DocumentDescription
// object for return, and so a join is necessary against the jobqueue table.
StringBuilder sb = new StringBuilder("SELECT ");
ArrayList newList = new ArrayList();
sb.append("t0.").append(jobQueue.idField).append(",")
.append("t0.").append(jobQueue.docHashField).append(",")
.append("t0.").append(jobQueue.docIDField)
.append(" FROM ").append(carryDown.getTableName()).append(" t1, ")
.append(jobQueue.getTableName()).append(" t0 WHERE ");
sb.append(database.buildConjunctionClause(newList,new ClauseDescription[]{
new UnitaryClause("t1."+carryDown.jobIDField,jobID),
new MultiClause("t1."+carryDown.parentIDHashField,list)})).append(" AND ");
sb.append(database.buildConjunctionClause(newList,new ClauseDescription[]{
new JoinClause("t0."+jobQueue.docHashField,"t1."+carryDown.childIDHashField),
new JoinClause("t0."+jobQueue.jobIDField,"t1."+carryDown.jobIDField)}));
/*
sb.append("t0.").append(jobQueue.idField).append(",")
.append("t0.").append(jobQueue.docHashField).append(",")
.append("t0.").append(jobQueue.docIDField)
.append(" FROM ").append(jobQueue.getTableName()).append(" t0 WHERE ")
.append(database.buildConjunctionClause(newList,new ClauseDescription[]{
new UnitaryClause("t0."+jobQueue.jobIDField,jobID)})).append(" AND ");
sb.append("EXISTS(SELECT 'x' FROM ").append(carryDown.getTableName()).append(" t1 WHERE ")
.append(database.buildConjunctionClause(newList,new ClauseDescription[]{
new JoinClause("t1."+carryDown.jobIDField,"t0."+jobQueue.jobIDField),
new MultiClause("t1."+carryDown.parentIDHashField,list),
new JoinClause("t1."+carryDown.childIDHashField,"t0."+jobQueue.docHashField)}))
.append(")");
*/
IResultSet set = database.performQuery(sb.toString(),newList,null,null);
int i = 0;
while (i < set.getRowCount())
{
IResultRow row = set.getRow(i++);
Long id = (Long)row.getValue(jobQueue.idField);
String documentIdentifierHash = (String)row.getValue(jobQueue.docHashField);
String documentIdentifier = (String)row.getValue(jobQueue.docIDField);
resultHash.put(id,new DocumentDescription(id,jobID,documentIdentifierHash,documentIdentifier));
}
}
/** Requeue a document for further processing in the future.
* This method is called after a document is processed, when the job is a "continuous" one.
* It is essentially equivalent to noting that the document processing is complete, except the
* document remains on the queue.
*@param documentDescriptions is the set of description objects for the document that was processed.
*@param executeTimes are the times that the documents should be rescanned. Null indicates "never".
*@param actions are what should be done when the time arrives. Choices are ACTION_RESCAN or ACTION_REMOVE.
*/
public void requeueDocumentMultiple(DocumentDescription[] documentDescriptions, Long[] executeTimes,
int[] actions)
throws ManifoldCFException
{
String[] docIDHashes = new String[documentDescriptions.length];
Long[] ids = new Long[documentDescriptions.length];
Long[] executeTimesNew = new Long[documentDescriptions.length];
int[] actionsNew = new int[documentDescriptions.length];
// First loop maps document identifier back to an index.
HashMap indexMap = new HashMap();
int i = 0;
while (i < documentDescriptions.length)
{
docIDHashes[i] = documentDescriptions[i].getDocumentIdentifierHash() + ":" + documentDescriptions[i].getJobID();
indexMap.put(docIDHashes[i],new Integer(i));
i++;
}
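// Sorting on the composite "<hash>:<jobID>" keys built above gives every (document,job) pair a
// single global ordering, so concurrent batches tend to lock jobqueue rows in the same order;
// that ordering is what makes the deadlock avoidance below effective.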
// Sort!
java.util.Arrays.sort(docIDHashes);
// Next loop populates the actual arrays we use to feed the operation so that the ordering is correct.
i = 0;
while (i < docIDHashes.length)
{
String docIDHash = docIDHashes[i];
Integer x = (Integer)indexMap.remove(docIDHash);
if (x == null)
throw new ManifoldCFException("Assertion failure: duplicate document identifier jobid/hash detected!");
int index = x.intValue();
ids[i] = documentDescriptions[index].getID();
executeTimesNew[i] = executeTimes[index];
actionsNew[i] = actions[index];
i++;
}
while (true)
{
long sleepAmt = 0L;
database.beginTransaction();
try
{
// Going through ids in order should greatly reduce or eliminate chances of deadlock occurring. We thus need to pay attention to the sorted order.
i = 0;
while (i < ids.length)
{
jobQueue.setStatus(ids[i],jobQueue.STATUS_PENDINGPURGATORY,executeTimesNew[i],actionsNew[i],-1L,-1);
i++;
}
TrackerClass.notePrecommit();
database.performCommit();
TrackerClass.noteCommit();
break;
}
catch (Error e)
{
database.signalRollback();
TrackerClass.noteRollback();
throw e;
}
catch (ManifoldCFException e)
{
database.signalRollback();
TrackerClass.noteRollback();
if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT)
{
if (Logging.perf.isDebugEnabled())
Logging.perf.debug("Aborted transaction requeuing documents: "+e.getMessage());
sleepAmt = getRandomAmount();
continue;
}
throw e;
}
finally
{
database.endTransaction();
sleepFor(sleepAmt);
}
}
}
/** Requeue a document for further processing in the future.
* This method is called after a document is processed, when the job is a "continuous" one.
* It is essentially equivalent to noting that the document processing is complete, except the
* document remains on the queue.
*@param documentDescription is the description object for the document that was processed.
*@param executeTime is the time that the document should be rescanned. Null indicates "never".
*@param action is what should be done when the time arrives. Choices include ACTION_RESCAN or ACTION_REMOVE.
*/
public void requeueDocument(DocumentDescription documentDescription, Long executeTime, int action)
throws ManifoldCFException
{
requeueDocumentMultiple(new DocumentDescription[]{documentDescription},new Long[]{executeTime},new int[]{action});
}
/** Reset a set of documents for further processing in the future.
* This method is called after some unknown number of the documents were processed, but then a service interruption occurred.
* Note well: The logic here basically presumes that we cannot know whether the documents were indeed processed or not.
* If we knew for a fact that none of the documents had been handled, it would be possible to look at the document's
* current status and decide what the new status ought to be, based on a true rollback scenario. Such cases, however, are rare enough so that
* special logic is probably not worth it.
*@param documentDescriptions is the set of description objects for the documents that were processed.
*@param executeTime is the time that the documents should be rescanned.
*@param action is what should be done when the time arrives. Choices are ACTION_RESCAN or ACTION_REMOVE.
*@param failTime is the time beyond which a service interruption will be considered a hard failure.
*@param failCount is the number of retries beyond which a service interruption will be considered a hard failure.
*/
public void resetDocumentMultiple(DocumentDescription[] documentDescriptions, long executeTime,
int action, long failTime, int failCount)
throws ManifoldCFException
{
Long executeTimeLong = new Long(executeTime);
Long[] ids = new Long[documentDescriptions.length];
String[] docIDHashes = new String[documentDescriptions.length];
Long[] executeTimes = new Long[documentDescriptions.length];
int[] actions = new int[documentDescriptions.length];
long[] failTimes = new long[documentDescriptions.length];
int[] failCounts = new int[documentDescriptions.length];
// First loop maps document identifier back to an index.
HashMap indexMap = new HashMap();
int i = 0;
while (i < documentDescriptions.length)
{
docIDHashes[i] = documentDescriptions[i].getDocumentIdentifierHash() + ":" + documentDescriptions[i].getJobID();
indexMap.put(docIDHashes[i],new Integer(i));
i++;
}
// Sort!
java.util.Arrays.sort(docIDHashes);
// Next loop populates the actual arrays we use to feed the operation so that the ordering is correct.
i = 0;
while (i < docIDHashes.length)
{
String docIDHash = docIDHashes[i];
Integer x = (Integer)indexMap.remove(docIDHash);
if (x == null)
throw new ManifoldCFException("Assertion failure: duplicate document identifier jobid/hash detected!");
int index = x.intValue();
ids[i] = documentDescriptions[index].getID();
executeTimes[i] = executeTimeLong;
actions[i] = action;
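// Merge the retry bookkeeping: a fail time already recorded on the document wins over the newly
// supplied one, and an already-started retry countdown is decremented (capped at the new
// failCount). Illustrative example: oldFailCount==3 with failCount==5 leaves 2 retries.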
long oldFailTime = documentDescriptions[index].getFailTime();
if (oldFailTime == -1L)
oldFailTime = failTime;
failTimes[i] = oldFailTime;
int oldFailCount = documentDescriptions[index].getFailRetryCount();
if (oldFailCount == -1)
oldFailCount = failCount;
else
{
oldFailCount--;
if (failCount != -1 && oldFailCount > failCount)
oldFailCount = failCount;
}
failCounts[i] = oldFailCount;
i++;
}
// Documents get marked PENDINGPURGATORY regardless of their current state; this is because we can't know at this point whether
// an ingestion attempt occurred or not, so we have to treat the documents as having been processed at least once.
while (true)
{
long sleepAmt = 0L;
database.beginTransaction();
try
{
// Going through ids in order should greatly reduce or eliminate chances of deadlock occurring. We thus need to pay attention to the sorted order.
i = 0;
while (i < ids.length)
{
// failTimes and failCounts are locally-built arrays and are never null here, so they can be passed directly.
jobQueue.setStatus(ids[i],jobQueue.STATUS_PENDINGPURGATORY,executeTimes[i],actions[i],failTimes[i],failCounts[i]);
i++;
}
database.performCommit();
break;
}
catch (Error e)
{
database.signalRollback();
throw e;
}
catch (ManifoldCFException e)
{
database.signalRollback();
if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT)
{
if (Logging.perf.isDebugEnabled())
Logging.perf.debug("Aborted transaction resetting documents: "+e.getMessage());
sleepAmt = getRandomAmount();
continue;
}
throw e;
}
finally
{
database.endTransaction();
sleepFor(sleepAmt);
}
}
}
/** Reset a set of cleaning documents for further processing in the future.
* This method is called after some unknown number of the documents were cleaned, but then an ingestion service interruption occurred.
* Note well: The logic here basically presumes that we cannot know whether the documents were indeed cleaned or not.
* If we knew for a fact that none of the documents had been handled, it would be possible to look at the document's
* current status and decide what the new status ought to be, based on a true rollback scenario. Such cases, however, are rare enough so that
* special logic is probably not worth it.
*@param documentDescriptions is the set of description objects for the documents that were cleaned.
*@param checkTime is the minimum time for the next cleaning attempt.
*/
public void resetCleaningDocumentMultiple(DocumentDescription[] documentDescriptions, long checkTime)
throws ManifoldCFException
{
Long[] ids = new Long[documentDescriptions.length];
String[] docIDHashes = new String[documentDescriptions.length];
// First loop maps document identifier back to an index.
HashMap indexMap = new HashMap();
int i = 0;
while (i < documentDescriptions.length)
{
docIDHashes[i] = documentDescriptions[i].getDocumentIdentifierHash() + ":" + documentDescriptions[i].getJobID();
indexMap.put(docIDHashes[i],new Integer(i));
i++;
}
// Sort!
java.util.Arrays.sort(docIDHashes);
// Next loop populates the actual arrays we use to feed the operation so that the ordering is correct.
i = 0;
while (i < docIDHashes.length)
{
String docIDHash = docIDHashes[i];
Integer x = (Integer)indexMap.remove(docIDHash);
if (x == null)
throw new ManifoldCFException("Assertion failure: duplicate document identifier jobid/hash detected!");
int index = x.intValue();
ids[i] = documentDescriptions[index].getID();
i++;
}
// Documents get marked PURGATORY regardless of their current state; this is because we can't know at this point what the actual prior state was.
while (true)
{
long sleepAmt = 0L;
database.beginTransaction();
try
{
// Going through ids in order should greatly reduce or eliminate chances of deadlock occurring. We thus need to pay attention to the sorted order.
i = 0;
while (i < ids.length)
{
jobQueue.setUncleaningStatus(ids[i],checkTime);
i++;
}
TrackerClass.notePrecommit();
database.performCommit();
TrackerClass.noteCommit();
break;
}
catch (ManifoldCFException e)
{
database.signalRollback();
TrackerClass.noteRollback();
if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT)
{
if (Logging.perf.isDebugEnabled())
Logging.perf.debug("Aborted transaction resetting cleaning documents: "+e.getMessage());
sleepAmt = getRandomAmount();
continue;
}
throw e;
}
catch (Error e)
{
database.signalRollback();
TrackerClass.noteRollback();
throw e;
}
finally
{
database.endTransaction();
sleepFor(sleepAmt);
}
}
}
/** Reset a cleaning document back to its former state.
* This gets done when a cleanup thread sees a service interruption, etc., from the ingestion system.
*@param documentDescription is the description of the document that was cleaned.
*@param checkTime is the minimum time for the next cleaning attempt.
*/
public void resetCleaningDocument(DocumentDescription documentDescription, long checkTime)
throws ManifoldCFException
{
resetCleaningDocumentMultiple(new DocumentDescription[]{documentDescription},checkTime);
}
/** Reset a set of deleting documents for further processing in the future.
* This method is called after some unknown number of the documents were deleted, but then an ingestion service interruption occurred.
* Note well: The logic here basically presumes that we cannot know whether the documents were indeed processed or not.
* If we knew for a fact that none of the documents had been handled, it would be possible to look at the document's
* current status and decide what the new status ought to be, based on a true rollback scenario. Such cases, however, are rare enough so that
* special logic is probably not worth it.
*@param documentDescriptions is the set of description objects for the documents that were being deleted.
*@param checkTime is the minimum time for the next deletion attempt.
*/
public void resetDeletingDocumentMultiple(DocumentDescription[] documentDescriptions, long checkTime)
throws ManifoldCFException
{
Long[] ids = new Long[documentDescriptions.length];
String[] docIDHashes = new String[documentDescriptions.length];
// First loop maps document identifier back to an index.
HashMap indexMap = new HashMap();
int i = 0;
while (i < documentDescriptions.length)
{
docIDHashes[i] = documentDescriptions[i].getDocumentIdentifierHash() + ":" + documentDescriptions[i].getJobID();
indexMap.put(docIDHashes[i],new Integer(i));
i++;
}
// Sort!
java.util.Arrays.sort(docIDHashes);
// Next loop populates the actual arrays we use to feed the operation so that the ordering is correct.
i = 0;
while (i < docIDHashes.length)
{
String docIDHash = docIDHashes[i];
Integer x = (Integer)indexMap.remove(docIDHash);
if (x == null)
throw new ManifoldCFException("Assertion failure: duplicate document identifier jobid/hash detected!");
int index = x.intValue();
ids[i] = documentDescriptions[index].getID();
i++;
}
// Documents get marked COMPLETED regardless of their current state; this is because we can't know at this point what the actual prior state was.
while (true)
{
long sleepAmt = 0L;
database.beginTransaction();
try
{
// Going through ids in order should greatly reduce or eliminate chances of deadlock occurring. We thus need to pay attention to the sorted order.
i = 0;
while (i < ids.length)
{
jobQueue.setUndeletingStatus(ids[i],checkTime);
i++;
}
TrackerClass.notePrecommit();
database.performCommit();
TrackerClass.noteCommit();
break;
}
catch (ManifoldCFException e)
{
database.signalRollback();
TrackerClass.noteRollback();
if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT)
{
if (Logging.perf.isDebugEnabled())
Logging.perf.debug("Aborted transaction resetting documents: "+e.getMessage());
sleepAmt = getRandomAmount();
continue;
}
throw e;
}
catch (Error e)
{
database.signalRollback();
TrackerClass.noteRollback();
throw e;
}
finally
{
database.endTransaction();
sleepFor(sleepAmt);
}
}
}
/** Reset a deleting document back to its former state.
* This gets done when a deleting thread sees a service interruption, etc., from the ingestion system.
*@param documentDescription is the description object for the document that was being deleted.
*@param checkTime is the minimum time for the next deletion attempt.
*/
public void resetDeletingDocument(DocumentDescription documentDescription, long checkTime)
throws ManifoldCFException
{
resetDeletingDocumentMultiple(new DocumentDescription[]{documentDescription},checkTime);
}
/** Reset an active document back to its former state.
* This gets done when there's a service interruption and the document cannot be processed yet.
* Note well: This method formerly presumed that a perfect rollback was possible, and that there was zero chance of any
* processing activity occurring before it got called. That assumption appears incorrect, however, so I've opted to now
* presume that processing has perhaps occurred. Perfect rollback is thus no longer possible.
*@param documentDescription is the description object for the document that was processed.
*@param executeTime is the time that the document should be rescanned.
*@param action is what should be done when the time arrives. Choices are ACTION_RESCAN or ACTION_REMOVE.
*@param failTime is the time by which the document should be considered to have failed, if it has not been
* successfully read before then.
*@param failCount is the number of retries beyond which a service interruption will be considered a hard failure.
*/
public void resetDocument(DocumentDescription documentDescription, long executeTime, int action, long failTime,
int failCount)
throws ManifoldCFException
{
resetDocumentMultiple(new DocumentDescription[]{documentDescription},executeTime,action,failTime,failCount);
}
/** Eliminate duplicates, and sort */
protected static String[] eliminateDuplicates(String[] docIDHashes)
{
HashMap map = new HashMap();
int i = 0;
while (i < docIDHashes.length)
{
String docIDHash = docIDHashes[i++];
map.put(docIDHash,docIDHash);
}
String[] rval = new String[map.size()];
i = 0;
Iterator iter = map.keySet().iterator();
while (iter.hasNext())
{
rval[i++] = (String)iter.next();
}
java.util.Arrays.sort(rval);
return rval;
}
/** Build a reorder map, describing how to convert an original index into a reordered index. */
protected static HashMap buildReorderMap(String[] originalIDHashes, String[] reorderedIDHashes)
{
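// Worked example (illustrative): originalIDHashes = {"b","a","b"} and reorderedIDHashes = {"a","b"}
// produce the map {0->1, 1->0}; the duplicate "b" at original index 2 gets no entry, because each
// reordered position is handed out only once.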
HashMap reorderSet = new HashMap();
int i = 0;
while (i < reorderedIDHashes.length)
{
String reorderedIDHash = reorderedIDHashes[i];
Integer position = new Integer(i);
reorderSet.put(reorderedIDHash,position);
i++;
}
HashMap map = new HashMap();
int j = 0;
while (j < originalIDHashes.length)
{
String originalIDHash = originalIDHashes[j];
Integer position = (Integer)reorderSet.get(originalIDHash);
if (position != null)
{
map.put(new Integer(j),position);
// Remove, so that only one of each duplicate will have a place in the map
reorderSet.remove(originalIDHash);
}
j++;
}
return map;
}
/** Add an initial set of documents to the queue.
* This method is called during job startup, when the queue is being loaded.
* A set of document references is passed to this method, which updates the status of the document
* in the specified job's queue, according to specific state rules.
*@param jobID is the job identifier.
*@param legalLinkTypes is the set of legal link types that this connector generates.
*@param docIDHashes are the local document identifier hashes.
*@param docIDs are the local document identifiers.
*@param overrideSchedule is true if any existing document schedule should be overridden.
*@param hopcountMethod is either accurate, nodelete, or neverdelete.
*@param currentTime is the current time in milliseconds since epoch.
*@param documentPriorities are the document priorities corresponding to the document identifiers.
*@param prereqEventNames are the events that must be completed before each document can be processed.
*@return true if the priority value(s) were used, false otherwise.
*/
public boolean[] addDocumentsInitial(Long jobID, String[] legalLinkTypes,
String[] docIDHashes, String[] docIDs, boolean overrideSchedule,
int hopcountMethod, long currentTime, double[] documentPriorities,
String[][] prereqEventNames)
throws ManifoldCFException
{
if (docIDHashes.length == 0)
return new boolean[0];
// The document identifiers need to be sorted in a consistent fashion to reduce deadlock, and have duplicates removed, before going ahead.
// But, the documentPriorities and the return booleans need to correspond to the initial array. So, after we come up with
// our internal order, we need to construct a map that takes an original index and maps it to the reduced, reordered index.
String[] reorderedDocIDHashes = eliminateDuplicates(docIDHashes);
HashMap reorderMap = buildReorderMap(docIDHashes,reorderedDocIDHashes);
double[] reorderedDocumentPriorities = new double[reorderedDocIDHashes.length];
String[][] reorderedDocumentPrerequisites = new String[reorderedDocIDHashes.length][];
String[] reorderedDocumentIdentifiers = new String[reorderedDocIDHashes.length];
boolean[] rval = new boolean[docIDHashes.length];
int i = 0;
while (i < docIDHashes.length)
{
Integer newPosition = (Integer)reorderMap.get(new Integer(i));
if (newPosition != null)
{
reorderedDocumentPriorities[newPosition.intValue()] = documentPriorities[i];
if (prereqEventNames != null)
reorderedDocumentPrerequisites[newPosition.intValue()] = prereqEventNames[i];
else
reorderedDocumentPrerequisites[newPosition.intValue()] = null;
reorderedDocumentIdentifiers[newPosition.intValue()] = docIDs[i];
}
rval[i] = false;
i++;
}
long startTime = 0L;
if (Logging.perf.isDebugEnabled())
{
startTime = System.currentTimeMillis();
Logging.perf.debug("Waiting to add "+Integer.toString(reorderedDocIDHashes.length)+" initial docs and hopcounts for job "+jobID.toString());
}
// Postgres gets all screwed up if we permit multiple threads into the hopcount code, unless serialized
// transactions are used. But serialized transactions may require a retry in order
// to resolve transaction conflicts.
while (true)
{
long sleepAmt = 0L;
database.beginTransaction(database.TRANSACTION_SERIALIZED);
try
{
if (Logging.perf.isDebugEnabled())
Logging.perf.debug("Waited "+new Long(System.currentTimeMillis()-startTime).toString()+" ms to start adding "+Integer.toString(reorderedDocIDHashes.length)+
" initial docs and hopcounts for job "+jobID.toString());
// Go through document id's one at a time, in order - mainly to prevent deadlock as much as possible. Search for any existing row in jobqueue first (for update)
boolean[] reorderedRval = new boolean[reorderedDocIDHashes.length];
int z = 0;
while (z < reorderedDocIDHashes.length)
{
String docIDHash = reorderedDocIDHashes[z];
double docPriority = reorderedDocumentPriorities[z];
String docID = reorderedDocumentIdentifiers[z];
String[] docPrereqs = reorderedDocumentPrerequisites[z];
StringBuilder sb = new StringBuilder("SELECT ");
ArrayList list = new ArrayList();
sb.append(jobQueue.idField).append(",")
.append(jobQueue.statusField).append(",")
.append(jobQueue.checkTimeField)
.append(" FROM ").append(jobQueue.getTableName()).append(" WHERE ")
.append(database.buildConjunctionClause(list,new ClauseDescription[]{
new UnitaryClause(jobQueue.docHashField,docIDHash),
new UnitaryClause(jobQueue.jobIDField,jobID)}));
sb.append(" FOR UPDATE");
IResultSet set = database.performQuery(sb.toString(),list,null,null);
boolean priorityUsed;
long executeTime = overrideSchedule?0L:-1L;
if (set.getRowCount() > 0)
{
// Found a row, and it is now locked.
IResultRow row = set.getRow(0);
// Decode the row
Long rowID = (Long)row.getValue(jobQueue.idField);
int status = jobQueue.stringToStatus((String)row.getValue(jobQueue.statusField));
Long checkTimeValue = (Long)row.getValue(jobQueue.checkTimeField);
priorityUsed = jobQueue.updateExistingRecordInitial(rowID,status,checkTimeValue,executeTime,currentTime,docPriority,docPrereqs);
}
else
{
// Not found. Attempt an insert instead. This may fail due to constraints, but if this happens, the whole transaction will be retried.
jobQueue.insertNewRecordInitial(jobID,docIDHash,docID,docPriority,executeTime,currentTime,docPrereqs);
priorityUsed = true;
}
reorderedRval[z++] = priorityUsed;
}
if (Logging.perf.isDebugEnabled())
Logging.perf.debug("Took "+new Long(System.currentTimeMillis()-startTime).toString()+" ms to add "+Integer.toString(reorderedDocIDHashes.length)+
" initial docs for job "+jobID.toString());
if (legalLinkTypes.length > 0)
hopCount.recordSeedReferences(jobID,legalLinkTypes,reorderedDocIDHashes,hopcountMethod);
TrackerClass.notePrecommit();
database.performCommit();
TrackerClass.noteCommit();
if (Logging.perf.isDebugEnabled())
Logging.perf.debug("Took "+new Long(System.currentTimeMillis()-startTime).toString()+" ms to add "+Integer.toString(reorderedDocIDHashes.length)+
" initial docs and hopcounts for job "+jobID.toString());
// Rejigger to correspond with calling order
i = 0;
while (i < docIDs.length)
{
Integer finalPosition = (Integer)reorderMap.get(new Integer(i));
if (finalPosition != null)
rval[i] = reorderedRval[finalPosition.intValue()];
i++;
}
return rval;
}
catch (ManifoldCFException e)
{
database.signalRollback();
TrackerClass.noteRollback();
if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT)
{
if (Logging.perf.isDebugEnabled())
Logging.perf.debug("Aborted transaction adding "+Integer.toString(reorderedDocIDHashes.length)+
" initial docs for job "+jobID.toString()+": "+e.getMessage());
sleepAmt = getRandomAmount();
continue;
}
throw e;
}
catch (Error e)
{
database.signalRollback();
TrackerClass.noteRollback();
throw e;
}
finally
{
database.endTransaction();
sleepFor(sleepAmt);
}
}
}
/** Add an initial set of remaining documents to the queue.
* This method is called during job startup, when the queue is being loaded, to list documents that
* were NOT included by calling addDocumentsInitial(). Documents listed here are simply designed to
* enable the framework to get rid of old, invalid seeds. They are not queued for processing.
*@param jobID is the job identifier.
*@param legalLinkTypes is the set of legal link types that this connector generates.
*@param docIDHashes are the local document identifier hashes.
*@param hopcountMethod is either accurate, nodelete, or neverdelete.
*/
public void addRemainingDocumentsInitial(Long jobID, String[] legalLinkTypes, String[] docIDHashes,
int hopcountMethod)
throws ManifoldCFException
{
if (docIDHashes.length == 0)
return;
String[] reorderedDocIDHashes = eliminateDuplicates(docIDHashes);
long startTime = 0L;
if (Logging.perf.isDebugEnabled())
{
startTime = System.currentTimeMillis();
Logging.perf.debug("Waiting to add "+Integer.toString(reorderedDocIDHashes.length)+" remaining docs and hopcounts for job "+jobID.toString());
}
// Postgres gets all screwed up if we permit multiple threads into the hopcount code unless the transactions are serialized,
// since otherwise one transaction may see the effects of another transaction before it's been committed.
while (true)
{
long sleepAmt = 0L;
database.beginTransaction(database.TRANSACTION_SERIALIZED);
try
{
if (Logging.perf.isDebugEnabled())
Logging.perf.debug("Waited "+new Long(System.currentTimeMillis()-startTime).toString()+" ms to start adding "+Integer.toString(reorderedDocIDHashes.length)+
" remaining docs and hopcounts for job "+jobID.toString());
jobQueue.addRemainingDocumentsInitial(jobID,reorderedDocIDHashes);
if (legalLinkTypes.length > 0)
hopCount.recordSeedReferences(jobID,legalLinkTypes,reorderedDocIDHashes,hopcountMethod);
database.performCommit();
if (Logging.perf.isDebugEnabled())
Logging.perf.debug("Took "+new Long(System.currentTimeMillis()-startTime).toString()+" ms to add "+Integer.toString(reorderedDocIDHashes.length)+
" remaining docs and hopcounts for job "+jobID.toString());
// Success; without this break the retry loop would re-run the committed transaction forever.
break;
}
catch (ManifoldCFException e)
{
database.signalRollback();
if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT)
{
if (Logging.perf.isDebugEnabled())
Logging.perf.debug("Aborted transaction adding "+Integer.toString(reorderedDocIDHashes.length)+
" remaining docs and hopcounts for job "+jobID.toString()+": "+e.getMessage());
sleepAmt = getRandomAmount();
continue;
}
throw e;
}
catch (Error e)
{
database.signalRollback();
throw e;
}
finally
{
database.endTransaction();
sleepFor(sleepAmt);
}
}
}
/** Signal that a seeding pass has been done.
* Call this method at the end of a seeding pass. It is used to perform the bookkeeping necessary to
* maintain the hopcount table.
*@param jobID is the job identifier.
*@param legalLinkTypes is the set of legal link types that this connector generates.
*@param isPartial is set if the seeds provided are only a partial list. Some connectors cannot
* supply a full list of seeds on every seeding iteration; this acknowledges that limitation.
*@param hopcountMethod describes how to handle deletions for hopcount purposes.
*/
public void doneDocumentsInitial(Long jobID, String[] legalLinkTypes, boolean isPartial,
int hopcountMethod)
throws ManifoldCFException
{
long startTime = 0L;
if (Logging.perf.isDebugEnabled())
{
startTime = System.currentTimeMillis();
Logging.perf.debug("Waiting to finish initial docs and hopcounts for job "+jobID.toString());
}
// Postgres gets all screwed up if we permit multiple threads into the hopcount code unless serialized transactions are used,
// since otherwise one transaction may see the effects of another transaction before it's been committed.
while (true)
{
long sleepAmt = 0L;
database.beginTransaction(database.TRANSACTION_SERIALIZED);
try
{
if (Logging.perf.isDebugEnabled())
Logging.perf.debug("Waited "+new Long(System.currentTimeMillis()-startTime).toString()+
" ms to start finishing initial docs and hopcounts for job "+jobID.toString());
jobQueue.doneDocumentsInitial(jobID,isPartial);
if (Logging.perf.isDebugEnabled())
Logging.perf.debug("Took "+new Long(System.currentTimeMillis()-startTime).toString()+
" ms to finish initial docs for job "+jobID.toString());
if (legalLinkTypes.length > 0)
hopCount.finishSeedReferences(jobID,legalLinkTypes,hopcountMethod);
database.performCommit();
if (Logging.perf.isDebugEnabled())
Logging.perf.debug("Took "+new Long(System.currentTimeMillis()-startTime).toString()+
" ms to finish initial docs and hopcounts for job "+jobID.toString());
break;
}
catch (ManifoldCFException e)
{
database.signalRollback();
if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT)
{
if (Logging.perf.isDebugEnabled())
Logging.perf.debug("Aborted transaction finishing initial docs and hopcounts for job "+jobID.toString()+": "+e.getMessage());
sleepAmt = getRandomAmount();
continue;
}
throw e;
}
catch (Error e)
{
database.signalRollback();
throw e;
}
finally
{
database.endTransaction();
sleepFor(sleepAmt);
}
}
}
/** Get the specified hop counts, with the limit as described.
*@param jobID is the job identifier.
*@param legalLinkTypes is the set of legal link types that this connector generates.
*@param docIDHashes are the hashes for the set of documents to find the hopcount for.
*@param linkType is the kind of link to find the hopcount for.
*@param limit is the limit, beyond which a negative distance may be returned.
*@param hopcountMethod is the method for managing hopcounts that is in effect.
*@return an array of booleans corresponding to the documents requested. A true value is returned
* if the document is within the specified limit, false otherwise.
*/
public boolean[] findHopCounts(Long jobID, String[] legalLinkTypes, String[] docIDHashes, String linkType, int limit,
int hopcountMethod)
throws ManifoldCFException
{
if (docIDHashes.length == 0)
return new boolean[0];
if (legalLinkTypes.length == 0)
throw new ManifoldCFException("Nonsensical request; asking for hopcounts where none are kept");
// The idea is to delay queue processing as much as possible, because that avoids having to wait
// on locks and having to repeat our evaluations.
//
// Luckily, we can glean a lot of information from what's hanging around. Specifically, whatever value
// we find in the table is an upper bound on the true hop distance value. So, only if we have documents
// that are outside the limit does the queue need to be processed.
//
// It is therefore really helpful to write in an estimated value for any newly created record, if possible. Even if the
// estimate is possibly greater than the true value, a great deal of locking and queue processing will be
// avoided.
// The flow here is to:
// - grab the right hoplock
// - process the queue
// - if the queue is empty, get the hopcounts we wanted, otherwise release the lock and loop around
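// Illustrative example: with limit 3, a cached distance of 2 for a document is a usable answer
// immediately; a cached 5 is only an upper bound (the true distance might still be <= 3), and a
// missing entry (-1) is unknown, so such documents stay in badAnswers until processing is definitive.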
long startTime = 0L;
if (Logging.perf.isDebugEnabled())
{
startTime = System.currentTimeMillis();
Logging.perf.debug("Beginning work to get "+Integer.toString(docIDHashes.length)+" hopcounts for job "+jobID.toString());
}
// Make an answer array.
boolean[] rval = new boolean[docIDHashes.length];
// Make a hash of what we still need a definitive answer for.
HashMap badAnswers = new HashMap();
int i = 0;
while (i < rval.length)
{
String docIDHash = docIDHashes[i];
rval[i] = false;
badAnswers.put(docIDHash,new Integer(i));
i++;
}
int iterationCount = 0;
while (true)
{
// Ask only about documents we don't have a definitive answer for yet.
String[] askDocIDHashes = new String[badAnswers.size()];
i = 0;
Iterator iter = badAnswers.keySet().iterator();
while (iter.hasNext())
{
askDocIDHashes[i++] = (String)iter.next();
}
int[] distances = hopCount.findHopCounts(jobID,askDocIDHashes,linkType);
i = 0;
while (i < distances.length)
{
int distance = distances[i];
String docIDHash = askDocIDHashes[i];
if (distance != -1 && distance <= limit)
{
// Found a usable value
rval[((Integer)badAnswers.remove(docIDHash)).intValue()] = true;
}
i++;
}
if (Logging.perf.isDebugEnabled())
Logging.perf.debug("Iteration "+Integer.toString(iterationCount++)+": After initial check, "+Integer.toString(badAnswers.size())+
" hopcounts remain to be found for job "+jobID.toString()+", out of "+Integer.toString(docIDHashes.length)+
" ("+new Long(System.currentTimeMillis()-startTime).toString()+" ms)");
if (badAnswers.size() == 0)
return rval;
// It appears we need to process the queue. We need to enter the hoplock section
// to make sure only one player is updating values at a time. Then, before we exit, we get the
// remaining values.
askDocIDHashes = new String[badAnswers.size()];
i = 0;
iter = badAnswers.keySet().iterator();
while (iter.hasNext())
{
askDocIDHashes[i++] = (String)iter.next();
}
// Currently, only one thread can possibly process any of the queue at a given time. This is because the queue marks are not set to something
// other than the "in queue" value during processing. My instinct is that queue processing is likely to interfere with other queue processing,
// so I've taken the route of prohibiting more than one batch of queue processing at a time, for now.
String hopLockName = getHopLockName(jobID);
long sleepAmt = 0L;
lockManager.enterWriteLock(hopLockName);
try
{
database.beginTransaction(database.TRANSACTION_SERIALIZED);
try
{
if (Logging.perf.isDebugEnabled())
Logging.perf.debug("Processing queue for job "+jobID.toString()+" ("+new Long(System.currentTimeMillis()-startTime).toString()+" ms)");
// The internal queue processing only does 200 at a time. This is a compromise between maximum efficiency (a bigger number)
// and the fact that database writes are effectively blocked for the duration (which argues for a smaller number).
boolean definitive = hopCount.processQueue(jobID,legalLinkTypes,hopcountMethod);
// If definitive answers were not found, we leave the lock and go back to check on the status of the questions we were
// interested in. If the answers are all OK then we are done; if not, we need to process more queue, and keep doing that
// until we really ARE done.
if (!definitive)
{
// Sleep a little bit so another thread can have a whack at things
sleepAmt = 100L;
database.performCommit();
continue;
}
// Definitive answers found; continue through.
distances = hopCount.findHopCounts(jobID,askDocIDHashes,linkType);
database.performCommit();
}
catch (ManifoldCFException e)
{
database.signalRollback();
if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT)
{
if (Logging.perf.isDebugEnabled())
Logging.perf.debug("Aborted transaction processing queue for job "+jobID.toString()+": "+e.getMessage());
sleepAmt = getRandomAmount();
continue;
}
throw e;
}
catch (Error e)
{
database.signalRollback();
throw e;
}
finally
{
database.endTransaction();
}
}
finally
{
lockManager.leaveWriteLock(hopLockName);
sleepFor(sleepAmt);
}
if (Logging.perf.isDebugEnabled())
Logging.perf.debug("Definitive answers found for "+Integer.toString(docIDHashes.length)+
" hopcounts for job "+jobID.toString()+" ("+new Long(System.currentTimeMillis()-startTime).toString()+" ms)");
// All answers are guaranteed to be accurate now.
i = 0;
while (i < distances.length)
{
int distance = distances[i];
String docIDHash = askDocIDHashes[i];
if (distance != -1 && distance <= limit)
{
// Found a usable value
rval[((Integer)badAnswers.remove(docIDHash)).intValue()] = true;
}
i++;
}
return rval;
}
}
/** Get all the current seeds.
* Returns the seed document identifiers for a job.
*@param jobID is the job identifier.
*@return the document identifiers that are currently considered to be seeds.
*/
public String[] getAllSeeds(Long jobID)
throws ManifoldCFException
{
return jobQueue.getAllSeeds(jobID);
}
/** Add documents to the queue in bulk.
* This method is called during document processing, when a set of document references are discovered.
* The document references are passed to this method, which updates the status of the document(s)
* in the specified job's queue, according to specific state rules.
*@param jobID is the job identifier.
*@param legalLinkTypes is the set of legal link types that this connector generates.
*@param docIDHashes are the local document identifier hashes.
*@param docIDs are the local document identifiers.
*@param parentIdentifierHash is the optional parent identifier hash of this document. Pass null if none.
* MUST be present in the case of carrydown information.
*@param relationshipType is the optional link type between this document and its parent. Pass null if there
* is no relationship with a parent.
*@param hopcountMethod is the desired method for managing hopcounts.
*@param dataNames are the names of the data to carry down to the child from this parent.
*@param dataValues are the values to carry down to the child from this parent, corresponding to dataNames above. If CharacterInput objects are passed in here,
* it is the caller's responsibility to clean these up.
*@param currentTime is the time in milliseconds since epoch that will be recorded for this operation.
*@param documentPriorities are the desired document priorities for the documents.
*@param prereqEventNames are the events that must be completed before a document can be queued.
*@return an array of boolean values indicating whether or not the passed-in priority value was used or not for each doc id (true if used).
*/
public boolean[] addDocuments(Long jobID, String[] legalLinkTypes,
String[] docIDHashes, String[] docIDs,
String parentIdentifierHash, String relationshipType,
int hopcountMethod, String[][] dataNames, Object[][][] dataValues,
long currentTime, double[] documentPriorities,
String[][] prereqEventNames)
throws ManifoldCFException
{
if (docIDs.length == 0)
return new boolean[0];
// Sort the id hashes and eliminate duplicates. This will help avoid deadlock conditions.
// However, we also need to keep the carrydown data in synch, so track that around as well, and merge if there are
// duplicate document identifiers.
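// The merge structure built below is, illustratively:
//   nameMap: docIDHash -> (dataName -> (valueHash -> value))
// so duplicate documents pool their carrydown values into one per-name set, keyed by value hash
// to deduplicate the values themselves.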
HashMap nameMap = new HashMap();
int k = 0;
while (k < docIDHashes.length)
{
String docIDHash = docIDHashes[k];
// If there are duplicates, we need to merge them.
HashMap names = (HashMap)nameMap.get(docIDHash);
if (names == null)
{
names = new HashMap();
nameMap.put(docIDHash,names);
}
String[] nameList = dataNames[k];
Object[][] dataList = dataValues[k];
int z = 0;
while (z < nameList.length)
{
String name = nameList[z];
Object[] values = dataList[z];
HashMap valueMap = (HashMap)names.get(name);
if (valueMap == null)
{
valueMap = new HashMap();
names.put(name,valueMap);
}
int y = 0;
while (y < values.length)
{
// Calculate the value hash; that's the true key, and the one that cannot be duplicated.
String valueHash;
if (values[y] instanceof CharacterInput)
{
// It's a CharacterInput object.
valueHash = ((CharacterInput)values[y]).getHashValue();
}
else
{
// It better be a String.
valueHash = ManifoldCF.hash((String)values[y]);
}
valueMap.put(valueHash,values[y]);
y++;
}
z++;
}
k++;
}
String[] reorderedDocIDHashes = eliminateDuplicates(docIDHashes);
HashMap reorderMap = buildReorderMap(docIDHashes,reorderedDocIDHashes);
double[] reorderedDocumentPriorities = new double[reorderedDocIDHashes.length];
String[][] reorderedDocumentPrerequisites = new String[reorderedDocIDHashes.length][];
String[] reorderedDocumentIdentifiers = new String[reorderedDocIDHashes.length];
boolean[] rval = new boolean[docIDHashes.length];
int i = 0;
while (i < docIDHashes.length)
{
Integer newPosition = (Integer)reorderMap.get(new Integer(i));
if (newPosition != null)
{
reorderedDocumentPriorities[newPosition.intValue()] = documentPriorities[i];
if (prereqEventNames != null)
reorderedDocumentPrerequisites[newPosition.intValue()] = prereqEventNames[i];
else
reorderedDocumentPrerequisites[newPosition.intValue()] = null;
reorderedDocumentIdentifiers[newPosition.intValue()] = docIDs[i];
}
rval[i] = false;
i++;
}
dataNames = new String[reorderedDocIDHashes.length][];
String[][][] dataHashValues = new String[reorderedDocIDHashes.length][][];
dataValues = new Object[reorderedDocIDHashes.length][][];
k = 0;
while (k < reorderedDocIDHashes.length)
{
String docIDHash = reorderedDocIDHashes[k];
HashMap names = (HashMap)nameMap.get(docIDHash);
dataNames[k] = new String[names.size()];
dataHashValues[k] = new String[names.size()][];
dataValues[k] = new Object[names.size()][];
Iterator iter = names.keySet().iterator();
int z = 0;
while (iter.hasNext())
{
String dataName = (String)iter.next();
(dataNames[k])[z] = dataName;
HashMap values = (HashMap)names.get(dataName);
(dataHashValues[k])[z] = new String[values.size()];
(dataValues[k])[z] = new Object[values.size()];
Iterator iter2 = values.keySet().iterator();
int y = 0;
while (iter2.hasNext())
{
String dataValueHash = (String)iter2.next();
Object dataValue = values.get(dataValueHash);
((dataHashValues[k])[z])[y] = dataValueHash;
((dataValues[k])[z])[y] = dataValue;
y++;
}
z++;
}
k++;
}
long startTime = 0L;
if (Logging.perf.isDebugEnabled())
{
startTime = System.currentTimeMillis();
Logging.perf.debug("Waiting to add "+Integer.toString(reorderedDocIDHashes.length)+" docs and hopcounts for job "+jobID.toString()+" parent identifier "+parentIdentifierHash);
}
// Postgres gets all screwed up if we permit multiple threads into the hopcount code unless the transactions are serialized,
// since otherwise one transaction may see the effects of another transaction before it's been committed.
while (true)
{
long sleepAmt = 0L;
database.beginTransaction(database.TRANSACTION_SERIALIZED);
try
{
if (Logging.perf.isDebugEnabled())
Logging.perf.debug("Waited "+new Long(System.currentTimeMillis()-startTime).toString()+" ms to start adding "+Integer.toString(reorderedDocIDHashes.length)+
" docs and hopcounts for job "+jobID.toString()+" parent identifier hash "+parentIdentifierHash);
// Go through document id's one at a time, in order - mainly to prevent deadlock as much as possible. Search for any existing row in jobqueue first (for update)
HashMap existingRows = new HashMap();
for (int z = 0; z < reorderedDocIDHashes.length; z++)
{
String docIDHash = reorderedDocIDHashes[z];
StringBuilder sb = new StringBuilder("SELECT ");
ArrayList list = new ArrayList();
sb.append(jobQueue.idField).append(",")
.append(jobQueue.statusField).append(",")
.append(jobQueue.checkTimeField)
.append(" FROM ").append(jobQueue.getTableName()).append(" WHERE ")
.append(database.buildConjunctionClause(list,new ClauseDescription[]{
new UnitaryClause(jobQueue.docHashField,docIDHash),
new UnitaryClause(jobQueue.jobIDField,jobID)}));
sb.append(" FOR UPDATE");
IResultSet set = database.performQuery(sb.toString(),list,null,null);
boolean priorityUsed;
if (set.getRowCount() > 0)
{
// Found a row, and it is now locked.
IResultRow row = set.getRow(0);
// Decode the row
Long rowID = (Long)row.getValue(jobQueue.idField);
int status = jobQueue.stringToStatus((String)row.getValue(jobQueue.statusField));
Long checkTimeValue = (Long)row.getValue(jobQueue.checkTimeField);
existingRows.put(docIDHash,new JobqueueRecord(rowID,status,checkTimeValue));
}
else
{
// Not found. Attempt an insert instead. This may fail due to constraints, but if this happens, the whole transaction will be retried.
jobQueue.insertNewRecord(jobID,docIDHash,reorderedDocumentIdentifiers[z],reorderedDocumentPriorities[z],0L,currentTime,reorderedDocumentPrerequisites[z]);
}
}
// Update all the carrydown data at once, for greatest efficiency.
boolean[] carrydownChangesSeen = carryDown.recordCarrydownDataMultiple(jobID,parentIdentifierHash,reorderedDocIDHashes,dataNames,dataHashValues,dataValues);
// Same with hopcount.
boolean[] hopcountChangesSeen = null;
if (parentIdentifierHash != null && relationshipType != null)
hopcountChangesSeen = hopCount.recordReferences(jobID,legalLinkTypes,parentIdentifierHash,reorderedDocIDHashes,relationshipType,hopcountMethod);
// Loop through the document id's again, and perform updates where needed
boolean[] reorderedRval = new boolean[reorderedDocIDHashes.length];
boolean reactivateRemovedHopcountRecords = false;
for (int z = 0; z < reorderedDocIDHashes.length; z++)
{
String docIDHash = reorderedDocIDHashes[z];
JobqueueRecord jr = (JobqueueRecord)existingRows.get(docIDHash);
if (jr == null)
// It was an insert
reorderedRval[z] = true;
else
{
// It was an existing row; do the update logic
// The hopcountChangesSeen array describes whether each reference is a new one. This
// helps us determine whether we're going to need to "flip" HOPCOUNTREMOVED documents
// to the PENDING state. If the new link ended in an existing record, THEN we need to flip them all!
reorderedRval[z] = jobQueue.updateExistingRecord(jr.getRecordID(),jr.getStatus(),jr.getCheckTimeValue(),
0L,currentTime,carrydownChangesSeen[z] || (hopcountChangesSeen!=null && hopcountChangesSeen[z]),
reorderedDocumentPriorities[z],reorderedDocumentPrerequisites[z]);
// Signal if we need to perform the flip
if (hopcountChangesSeen != null && hopcountChangesSeen[z])
reactivateRemovedHopcountRecords = true;
}
}
if (reactivateRemovedHopcountRecords)
jobQueue.reactivateHopcountRemovedRecords(jobID);
TrackerClass.notePrecommit();
database.performCommit();
TrackerClass.noteCommit();
if (Logging.perf.isDebugEnabled())
Logging.perf.debug("Took "+new Long(System.currentTimeMillis()-startTime).toString()+" ms to add "+Integer.toString(reorderedDocIDHashes.length)+
" docs and hopcounts for job "+jobID.toString()+" parent identifier hash "+parentIdentifierHash);
i = 0;
while (i < docIDHashes.length)
{
Integer finalPosition = (Integer)reorderMap.get(new Integer(i));
if (finalPosition != null)
rval[i] = reorderedRval[finalPosition.intValue()];
i++;
}
return rval;
}
catch (ManifoldCFException e)
{
database.signalRollback();
TrackerClass.noteRollback();
if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT)
{
sleepAmt = getRandomAmount();
if (Logging.perf.isDebugEnabled())
Logging.perf.debug("Aborted transaction adding "+Integer.toString(reorderedDocIDHashes.length)+
" docs and hopcounts for job "+jobID.toString()+" parent identifier hash "+parentIdentifierHash+": "+e.getMessage()+"; sleeping for "+new Long(sleepAmt).toString()+" ms",e);
continue;
}
throw e;
}
catch (Error e)
{
database.signalRollback();
TrackerClass.noteRollback();
throw e;
}
finally
{
database.endTransaction();
sleepFor(sleepAmt);
}
}
}
/** Add a document to the queue.
* This method is called during document processing, when a document reference is discovered.
* The document reference is passed to this method, which updates the status of the document
* in the specified job's queue, according to specific state rules.
*@param jobID is the job identifier.
*@param legalLinkTypes is the set of legal link types that this connector generates.
*@param docIDHash is the local document identifier hash value.
*@param docID is the local document identifier.
*@param parentIdentifierHash is the optional parent identifier hash of this document. Pass null if none.
* MUST be present in the case of carrydown information.
*@param relationshipType is the optional link type between this document and its parent. Pass null if there
* is no relationship with a parent.
*@param hopcountMethod is the desired method for managing hopcounts.
*@param dataNames are the names of the data to carry down to the child from this parent.
*@param dataValues are the values to carry down to the child from this parent, corresponding to dataNames above.
*@param currentTime is the time in milliseconds since epoch that will be recorded for this operation.
*@param priority is the desired document priority for the document.
*@param prereqEventNames are the events that must be completed before the document can be processed.
*@return true if the priority value was used, false otherwise.
*/
public boolean addDocument(Long jobID, String[] legalLinkTypes, String docIDHash, String docID,
String parentIdentifierHash, String relationshipType,
int hopcountMethod, String[] dataNames, Object[][] dataValues,
long currentTime, double priority, String[] prereqEventNames)
throws ManifoldCFException
{
return addDocuments(jobID,legalLinkTypes,
new String[]{docIDHash},new String[]{docID},
parentIdentifierHash,relationshipType,hopcountMethod,new String[][]{dataNames},
new Object[][][]{dataValues},currentTime,new double[]{priority},new String[][]{prereqEventNames})[0];
}
/** Complete adding child documents to the queue, for a set of documents.
* This method is called at the end of document processing, to help the hopcount tracking engine do its bookkeeping.
*@param jobID is the job identifier.
*@param legalLinkTypes is the set of legal link types that this connector generates.
*@param parentIdentifierHashes are the document identifier hashes for whom child link extraction just took place.
*@param hopcountMethod describes how to handle deletions for hopcount purposes.
*@return the set of documents for which carrydown data was changed by this operation. These documents are likely
* to be requeued as a result of the change.
*/
public DocumentDescription[] finishDocuments(Long jobID, String[] legalLinkTypes, String[] parentIdentifierHashes, int hopcountMethod)
throws ManifoldCFException
{
if (parentIdentifierHashes.length == 0)
return new DocumentDescription[0];
DocumentDescription[] rval;
if (legalLinkTypes.length == 0)
{
// Must at least end the carrydown transaction. By itself, this does not need a serialized transaction; however, occasional
// deadlock is possible when a document shares multiple parents, so do the whole retry drill
while (true)
{
long sleepAmt = 0L;
database.beginTransaction(database.TRANSACTION_SERIALIZED);
try
{
// A certain set of carrydown records are going to be deleted by the ensuing restoreRecords command. Calculate that set of records!
rval = calculateAffectedRestoreCarrydownChildren(jobID,parentIdentifierHashes);
carryDown.restoreRecords(jobID,parentIdentifierHashes);
database.performCommit();
break;
}
catch (ManifoldCFException e)
{
database.signalRollback();
if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT)
{
if (Logging.perf.isDebugEnabled())
Logging.perf.debug("Aborted transaction finishing "+
Integer.toString(parentIdentifierHashes.length)+" doc carrydown records for job "+jobID.toString()+": "+e.getMessage());
sleepAmt = getRandomAmount();
continue;
}
throw e;
}
catch (Error e)
{
database.signalRollback();
throw e;
}
finally
{
database.endTransaction();
sleepFor(sleepAmt);
}
}
}
else
{
long startTime = 0L;
if (Logging.perf.isDebugEnabled())
{
startTime = System.currentTimeMillis();
Logging.perf.debug("Waiting to finish "+Integer.toString(parentIdentifierHashes.length)+" doc hopcounts for job "+jobID.toString());
}
// Postgres gets all screwed up if we permit multiple threads into the hopcount code unless the transactions are serialized,
// since otherwise one transaction may see the effects of another transaction before it's been committed.
while (true)
{
long sleepAmt = 0L;
database.beginTransaction(database.TRANSACTION_SERIALIZED);
try
{
// A certain set of carrydown records are going to be deleted by the ensuing restoreRecords command. Calculate that set of records!
rval = calculateAffectedRestoreCarrydownChildren(jobID,parentIdentifierHashes);
carryDown.restoreRecords(jobID,parentIdentifierHashes);
if (Logging.perf.isDebugEnabled())
Logging.perf.debug("Waited "+new Long(System.currentTimeMillis()-startTime).toString()+" ms to start finishing "+
Integer.toString(parentIdentifierHashes.length)+" doc hopcounts for job "+jobID.toString());
hopCount.finishParents(jobID,legalLinkTypes,parentIdentifierHashes,hopcountMethod);
database.performCommit();
if (Logging.perf.isDebugEnabled())
Logging.perf.debug("Took "+new Long(System.currentTimeMillis()-startTime).toString()+" ms to finish "+
Integer.toString(parentIdentifierHashes.length)+" doc hopcounts for job "+jobID.toString());
break;
}
catch (ManifoldCFException e)
{
database.signalRollback();
if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT)
{
if (Logging.perf.isDebugEnabled())
Logging.perf.debug("Aborted transaction finishing "+
Integer.toString(parentIdentifierHashes.length)+" doc hopcounts for job "+jobID.toString()+": "+e.getMessage());
sleepAmt = getRandomAmount();
continue;
}
throw e;
}
catch (Error e)
{
database.signalRollback();
throw e;
}
finally
{
database.endTransaction();
sleepFor(sleepAmt);
}
}
}
return rval;
}
/** Helper method: Calculate the unique set of affected carrydown children resulting from a "restoreRecords" operation.
*/
protected DocumentDescription[] calculateAffectedRestoreCarrydownChildren(Long jobID, String[] parentIDHashes)
throws ManifoldCFException
{
// We are going to need to break up this query into a number of subqueries, each covering a subset of parent id hashes.
// The goal is to throw all the children into a hash, to make them unique at the end.
HashMap resultHash = new HashMap();
ArrayList list = new ArrayList();
int maxCount = database.getMaxOrClause();
int i = 0;
int z = 0;
while (i < parentIDHashes.length)
{
if (z == maxCount)
{
processParentHashSet(jobID,resultHash,list);
list.clear();
z = 0;
}
list.add(parentIDHashes[i]);
i++;
z++;
}
if (z > 0)
processParentHashSet(jobID,resultHash,list);
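// Illustrative example: if getMaxOrClause() returned 25 and there were 60 parent hashes, the
// loop above would issue three subqueries covering 25, 25, and 10 hashes, pooling all the
// affected children in resultHash.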
// Now, put together the result document list from the hash.
DocumentDescription[] rval = new DocumentDescription[resultHash.size()];
i = 0;
Iterator iter = resultHash.keySet().iterator();
while (iter.hasNext())
{
Long id = (Long)iter.next();
DocumentDescription dd = (DocumentDescription)resultHash.get(id);
rval[i++] = dd;
}
return rval;
}
/** Helper method: look up rows affected by a restoreRecords operation.
*/
protected void processParentHashSet(Long jobID, HashMap resultHash, ArrayList list)
throws ManifoldCFException
{
// The query here mirrors the carrydown.restoreRecords() delete query! However, it also fetches enough information to build a DocumentDescription
// object for return, and so a join is necessary against the jobqueue table.
StringBuilder sb = new StringBuilder("SELECT ");
ArrayList newlist = new ArrayList();
sb.append("t0.").append(jobQueue.idField).append(",")
.append("t0.").append(jobQueue.docHashField).append(",")
.append("t0.").append(jobQueue.docIDField)
.append(" FROM ").append(carryDown.getTableName()).append(" t1, ")
.append(jobQueue.getTableName()).append(" t0 WHERE ");
sb.append(database.buildConjunctionClause(newlist,new ClauseDescription[]{
new UnitaryClause("t1."+carryDown.jobIDField,jobID),
new MultiClause("t1."+carryDown.parentIDHashField,list)})).append(" AND ");
sb.append(database.buildConjunctionClause(newlist,new ClauseDescription[]{
new JoinClause("t0."+jobQueue.docHashField,"t1."+carryDown.childIDHashField),
new JoinClause("t0."+jobQueue.jobIDField,"t1."+carryDown.jobIDField)})).append(" AND ");
sb.append("t1.").append(carryDown.newField).append("=?");
newlist.add(carryDown.statusToString(carryDown.ISNEW_BASE));
/*
sb.append("t0.").append(jobQueue.idField).append(",")
.append("t0.").append(jobQueue.docHashField).append(",")
.append("t0.").append(jobQueue.docIDField)
.append(" FROM ").append(jobQueue.getTableName()).append(" t0 WHERE ")
.append(database.buildConjunctionClause(newlist,new ClauseDescription[]{
new UnitaryClause("t0."+jobQueue.jobIDField,jobID)})).append(" AND ");
sb.append("EXISTS(SELECT 'x' FROM ").append(carryDown.getTableName()).append(" t1 WHERE ")
.append(database.buildConjunctionClause(newlist,new ClauseDescription[]{
new JoinClause("t1."+carryDown.jobIDField,"t0."+jobQueue.jobIDField),
new MultiClause("t1."+carryDown.parentIDHashField,list),
new JoinClause("t1."+carryDown.childIDHashField,"t0."+jobQueue.docHashField)})).append(" AND ")
.append("t1.").append(carryDown.newField).append("=?")
.append(")");
newlist.add(carryDown.statusToString(carryDown.ISNEW_BASE));
*/
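// Note: this is the same overall query shape as in processDeleteHashSet() earlier in this
// class, with the additional t1.<newfield>=? restriction so that only carrydown rows whose
// status is ISNEW_BASE are matched.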
IResultSet set = database.performQuery(sb.toString(),newlist,null,null);
int i = 0;
while (i < set.getRowCount())
{
IResultRow row = set.getRow(i++);
Long id = (Long)row.getValue(jobQueue.idField);
String documentIdentifierHash = (String)row.getValue(jobQueue.docHashField);
String documentIdentifier = (String)row.getValue(jobQueue.docIDField);
resultHash.put(id,new DocumentDescription(id,jobID,documentIdentifierHash,documentIdentifier));
}
}
/** Begin an event sequence.
*@param eventName is the name of the event.
*@return true if the event could be created, or false if it's already there.
*/
public boolean beginEventSequence(String eventName)
throws ManifoldCFException
{
try
{
eventManager.createEvent(eventName);
return true;
}
catch (ManifoldCFException e)
{
if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT)
return false;
throw e;
}
}
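// Typical usage (an illustrative sketch, not taken from an actual caller):
//   if (jobManager.beginEventSequence(eventName))
//   {
//     try { ... do the work guarded by the event ... }
//     finally { jobManager.completeEventSequence(eventName); }
//   }
// A false return means some other thread owns the sequence, and the caller should back off.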
/** Complete an event sequence.
*@param eventName is the name of the event.
*/
public void completeEventSequence(String eventName)
throws ManifoldCFException
{
eventManager.destroyEvent(eventName);
}
/** Requeue a document set because of carrydown changes.
* This method is called when carrydown data is modified for a set of documents. The documents must be requeued for immediate reprocessing, even to the
* extent that if one is *already* being processed, it will need to be done over again.
*@param documentDescriptions is the set of description objects for the documents that have had their parent carrydown information changed.
*@param currentTime is the time in milliseconds since epoch that will be recorded for this operation.
*@param docPriorities are the document priorities to assign to the documents, if needed.
*@return a flag for each document priority, true if it was used, false otherwise.
*/
public boolean[] carrydownChangeDocumentMultiple(DocumentDescription[] documentDescriptions, long currentTime, double[] docPriorities)
throws ManifoldCFException
{
if (documentDescriptions.length == 0)
return new boolean[0];
// Order the updates by document hash, to prevent deadlock as much as possible.
// This map contains the original index of the document id hash.
HashMap docHashMap = new HashMap();
String[] docIDHashes = new String[documentDescriptions.length];
int i = 0;
while (i < documentDescriptions.length)
{
docIDHashes[i] = documentDescriptions[i].getDocumentIdentifierHash() + ":" + documentDescriptions[i].getJobID();
docHashMap.put(docIDHashes[i],new Integer(i));
i++;
}
// Sort the hashes
java.util.Arrays.sort(docIDHashes);
boolean[] rval = new boolean[docIDHashes.length];
// Enter transaction and prepare to look up document states in dochash order
while (true)
{
long sleepAmt = 0L;
database.beginTransaction(database.TRANSACTION_SERIALIZED);
try
{
// This is the map that will contain the rows we found, keyed by docIDHash.
HashMap existingRows = new HashMap();
// Loop through hashes in order
int j = 0;
while (j < docIDHashes.length)
{
String docIDHash = docIDHashes[j];
// Get the index
int originalIndex = ((Integer)docHashMap.get(docIDHash)).intValue();
// Lookup document description
DocumentDescription dd = documentDescriptions[originalIndex];
// Do the query. We can base this on the id column since we have that.
StringBuilder sb = new StringBuilder("SELECT ");
ArrayList list = new ArrayList();
sb.append(jobQueue.idField).append(",")
.append(jobQueue.statusField).append(",")
.append(jobQueue.checkTimeField)
.append(" FROM ").append(jobQueue.getTableName()).append(" WHERE ")
.append(database.buildConjunctionClause(list,new ClauseDescription[]{
new UnitaryClause(jobQueue.idField,dd.getID())})).append(" FOR UPDATE");
IResultSet set = database.performQuery(sb.toString(),list,null,null);
// If the row is there, we use its current info to requeue it properly.
if (set.getRowCount() > 0)
{
// Found a row, and it is now locked.
IResultRow row = set.getRow(0);
// Decode the row
Long rowID = (Long)row.getValue(jobQueue.idField);
int status = jobQueue.stringToStatus((String)row.getValue(jobQueue.statusField));
Long checkTimeValue = (Long)row.getValue(jobQueue.checkTimeField);
existingRows.put(docIDHash,new JobqueueRecord(rowID,status,checkTimeValue));
}
j++;
}
        // Ok, existingRows contains all the rows we want to try to update.  Go through these and update.
        // (The index must be reset here; otherwise the loop below would never execute.)
        j = 0;
        while (j < docIDHashes.length)
{
String docIDHash = docIDHashes[j];
int originalIndex = ((Integer)docHashMap.get(docIDHash)).intValue();
JobqueueRecord jr = (JobqueueRecord)existingRows.get(docIDHash);
if (jr == null)
// It wasn't found, so the doc priority wasn't used.
rval[originalIndex] = false;
else
// It was an existing row; do the update logic; use the 'carrydown changes' flag = true all the time.
rval[originalIndex] = jobQueue.updateExistingRecord(jr.getRecordID(),jr.getStatus(),jr.getCheckTimeValue(),
0L,currentTime,true,docPriorities[originalIndex],null);
j++;
}
database.performCommit();
break;
}
catch (ManifoldCFException e)
{
database.signalRollback();
if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT)
{
if (Logging.perf.isDebugEnabled())
Logging.perf.debug("Aborted transaction handling "+Integer.toString(docIDHashes.length)+" carrydown changes: "+e.getMessage());
sleepAmt = getRandomAmount();
continue;
}
throw e;
}
catch (Error e)
{
database.signalRollback();
throw e;
}
finally
{
database.endTransaction();
sleepFor(sleepAmt);
}
}
return rval;
}
/** Requeue a document because of carrydown changes.
* This method is called when carrydown data is modified for a document. The document must be requeued for immediate reprocessing, even to the
* extent that if it is *already* being processed, it will need to be done over again.
  *@param documentDescription is the description object for the document that has had its parent carrydown information changed.
  *@param currentTime is the current time in milliseconds since epoch.
  *@param docPriority is the document priority to assign to the document, if needed.
*@return a flag for the document priority, true if it was used, false otherwise.
*/
public boolean carrydownChangeDocument(DocumentDescription documentDescription, long currentTime, double docPriority)
throws ManifoldCFException
{
return carrydownChangeDocumentMultiple(new DocumentDescription[]{documentDescription},currentTime,new double[]{docPriority})[0];
}
/** Sleep a random amount of time after a transaction abort.
*/
protected long getRandomAmount()
{
return database.getSleepAmt();
}
protected void sleepFor(long amt)
throws ManifoldCFException
{
database.sleepFor(amt);
}
/** Retrieve specific parent data for a given document.
*@param jobID is the job identifier.
*@param docIDHash is the document identifier hash value.
*@param dataName is the kind of data to retrieve.
*@return the unique data values.
*/
public String[] retrieveParentData(Long jobID, String docIDHash, String dataName)
throws ManifoldCFException
{
return carryDown.getDataValues(jobID,docIDHash,dataName);
}
/** Retrieve specific parent data for a given document.
*@param jobID is the job identifier.
*@param docIDHash is the document identifier hash value.
*@param dataName is the kind of data to retrieve.
*@return the unique data values.
*/
public CharacterInput[] retrieveParentDataAsFiles(Long jobID, String docIDHash, String dataName)
throws ManifoldCFException
{
return carryDown.getDataValuesAsFiles(jobID,docIDHash,dataName);
}
// These methods support the job threads (which start jobs and end jobs)
// There is one thread that starts jobs. It simply looks for jobs which are ready to
// start, and changes their state accordingly.
// There is also a pool of threads that end jobs. These threads wait for a job that
// looks like it is done, and do completion processing if it is.
/** Start all jobs in need of starting.
* This method marks all the appropriate jobs as "in progress", which is all that should be
* needed to start them.
  * It's also the case that the start event should be logged in the event log.  In order to make it possible for
  * the caller to do this logging, the unwaitList argument is filled in with the set of job IDs that
  * were resumed from a wait state; jobs undergoing a full start are logged later by the startup thread.
*@param currentTime is the current time in milliseconds since epoch.
*@param unwaitList is filled in with the set of job ID objects that were resumed.
*/
public void startJobs(long currentTime, ArrayList unwaitList)
throws ManifoldCFException
{
// This method should compare the lasttime field against the current time, for all
// "not active" jobs, and see if a job should be started.
//
// If a job is to be started, then the following occurs:
// (1) If the job is "full scan", then all COMPLETED jobqueue entries are converted to
// PURGATORY.
// (2) The job is labeled as "ACTIVE".
// (3) The starttime field is set.
// (4) The endtime field is nulled out.
//
// This method also assesses jobs that are ACTIVE or PAUSED to see if they should be
// converted to ACTIVEWAIT or PAUSEDWAIT. This would happen if the current time exceeded
// the value in the "windowend" field for the job.
//
// Finally, jobs in ACTIVEWAIT or PAUSEDWAIT are assessed to see if they should become
// ACTIVE or PAUSED. This will occur if we have entered a new window for the job.
// Note well: We can't combine locks across both our lock manager and the database unless we do it consistently. The
// consistent practice throughout CF is to do the external locks first, then the database locks. This particular method
// thus cannot use cached job description information, because it must throw database locks first against the jobs table.
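    // (Concretely: any lock-manager lock must be acquired before database.beginTransaction()
    // is called, never afterwards.  Since this method takes database locks immediately, it
    // cannot also consult the externally-locked cached job descriptions.)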
while (true)
{
long sleepAmt = 0L;
database.beginTransaction();
try
{
// First, query the appropriate fields of all jobs.
StringBuilder sb = new StringBuilder("SELECT ");
ArrayList list = new ArrayList();
sb.append(jobs.idField).append(",")
.append(jobs.lastTimeField).append(",")
.append(jobs.statusField).append(",")
.append(jobs.startMethodField).append(",")
.append(jobs.outputNameField).append(",")
.append(jobs.connectionNameField)
.append(" FROM ").append(jobs.getTableName()).append(" WHERE ")
.append(database.buildConjunctionClause(list,new ClauseDescription[]{
new MultiClause(jobs.statusField,new Object[]{
jobs.statusToString(jobs.STATUS_INACTIVE),
jobs.statusToString(jobs.STATUS_ACTIVEWAIT),
jobs.statusToString(jobs.STATUS_ACTIVEWAITSEEDING),
jobs.statusToString(jobs.STATUS_PAUSEDWAIT),
jobs.statusToString(jobs.STATUS_PAUSEDWAITSEEDING)})})).append(" AND ")
.append(jobs.startMethodField).append("!=? FOR UPDATE");
list.add(jobs.startMethodToString(IJobDescription.START_DISABLE));
IResultSet set = database.performQuery(sb.toString(),list,null,null);
// Next, we query for the schedule information. In order to do that, we amass a list of job identifiers that we want schedule info
// for.
Long[] jobIDSet = new Long[set.getRowCount()];
int i = 0;
while (i < set.getRowCount())
{
IResultRow row = set.getRow(i);
jobIDSet[i++] = (Long)row.getValue(jobs.idField);
}
ScheduleRecord[][] srSet = jobs.readScheduleRecords(jobIDSet);
i = 0;
while (i < set.getRowCount())
{
IResultRow row = set.getRow(i);
Long jobID = (Long)row.getValue(jobs.idField);
int startMethod = jobs.stringToStartMethod((String)row.getValue(jobs.startMethodField));
String outputName = (String)row.getValue(jobs.outputNameField);
String connectionName = (String)row.getValue(jobs.connectionNameField);
ScheduleRecord[] thisSchedule = srSet[i++];
// Run at specific times
// We need to start with the start time as given, plus one
long startInterval = ((Long)row.getValue(jobs.lastTimeField)).longValue() + 1;
if (Logging.jobs.isDebugEnabled())
Logging.jobs.debug("Checking if job "+jobID.toString()+" needs to be started; it was last checked at "+
new Long(startInterval).toString()+", and now it is "+new Long(currentTime).toString());
// Proceed to the current time, and find a match if there is one to be found.
// If not -> continue
        // We go through *all* the schedule records.  Of the records that match, the one with
        // the latest end time is the one we take.
Long matchTime = null;
Long duration = null;
boolean requestMinimum = false;
for (int l = 0; l < thisSchedule.length; l++)
{
long trialStartInterval = startInterval;
ScheduleRecord sr = thisSchedule[l];
Long thisDuration = sr.getDuration();
if (startMethod == IJobDescription.START_WINDOWINSIDE &&
thisDuration != null)
{
// Bump the start interval back before the beginning of the current interval.
// This will guarantee a start as long as there is time in the window.
long trialStart = currentTime - thisDuration.longValue();
if (trialStart < trialStartInterval)
trialStartInterval = trialStart;
}
Long thisMatchTime = checkTimeMatch(trialStartInterval,currentTime,
sr.getDayOfWeek(),
sr.getDayOfMonth(),
sr.getMonthOfYear(),
sr.getYear(),
sr.getHourOfDay(),
sr.getMinutesOfHour(),
sr.getTimezone(),
thisDuration);
if (thisMatchTime == null)
{
if (Logging.jobs.isDebugEnabled())
Logging.jobs.debug(" No time match found within interval "+new Long(trialStartInterval).toString()+
" to "+new Long(currentTime).toString());
continue;
}
if (Logging.jobs.isDebugEnabled())
Logging.jobs.debug(" Time match FOUND within interval "+new Long(trialStartInterval).toString()+
" to "+new Long(currentTime).toString());
if (matchTime == null || thisDuration == null ||
(duration != null && thisMatchTime.longValue() + thisDuration.longValue() >
matchTime.longValue() + duration.longValue()))
{
matchTime = thisMatchTime;
duration = thisDuration;
requestMinimum = sr.getRequestMinimum();
}
}
if (matchTime == null)
{
jobs.updateLastTime(jobID,currentTime);
continue;
}
int status = jobs.stringToStatus(row.getValue(jobs.statusField).toString());
// Calculate the end of the window
Long windowEnd = null;
if (duration != null)
{
windowEnd = new Long(matchTime.longValue()+duration.longValue());
}
if (Logging.jobs.isDebugEnabled())
{
Logging.jobs.debug("Job '"+jobID+"' is within run window at "+new Long(currentTime).toString()+" ms. (which starts at "+
matchTime.toString()+" ms."+((duration==null)?"":(" and goes for "+duration.toString()+" ms."))+")");
}
int newJobState;
switch (status)
{
case Jobs.STATUS_INACTIVE:
// If job was formerly "inactive", do the full startup.
          // Start this job, but with no end time.
// This does not get logged because the startup thread does the logging.
jobs.startJob(jobID,windowEnd,requestMinimum);
jobQueue.clearFailTimes(jobID);
if (Logging.jobs.isDebugEnabled())
{
Logging.jobs.debug("Signalled for job start for job "+jobID);
}
break;
case Jobs.STATUS_ACTIVEWAIT:
unwaitList.add(jobID);
jobs.unwaitJob(jobID,Jobs.STATUS_RESUMING,windowEnd);
jobQueue.clearFailTimes(jobID);
if (Logging.jobs.isDebugEnabled())
{
Logging.jobs.debug("Un-waited job "+jobID);
}
break;
case Jobs.STATUS_ACTIVEWAITSEEDING:
unwaitList.add(jobID);
jobs.unwaitJob(jobID,Jobs.STATUS_RESUMINGSEEDING,windowEnd);
jobQueue.clearFailTimes(jobID);
if (Logging.jobs.isDebugEnabled())
{
Logging.jobs.debug("Un-waited job "+jobID);
}
break;
case Jobs.STATUS_PAUSEDWAIT:
unwaitList.add(jobID);
jobs.unwaitJob(jobID,jobs.STATUS_PAUSED,windowEnd);
if (Logging.jobs.isDebugEnabled())
{
Logging.jobs.debug("Un-waited (but still paused) job "+jobID);
}
break;
case Jobs.STATUS_PAUSEDWAITSEEDING:
unwaitList.add(jobID);
jobs.unwaitJob(jobID,jobs.STATUS_PAUSEDSEEDING,windowEnd);
if (Logging.jobs.isDebugEnabled())
{
Logging.jobs.debug("Un-waited (but still paused) job "+jobID);
}
break;
case Jobs.STATUS_PAUSINGWAITING:
unwaitList.add(jobID);
jobs.unwaitJob(jobID,jobs.STATUS_PAUSING,windowEnd);
if (Logging.jobs.isDebugEnabled())
{
Logging.jobs.debug("Un-waited (but still paused) job "+jobID);
}
break;
case Jobs.STATUS_PAUSINGWAITINGSEEDING:
unwaitList.add(jobID);
jobs.unwaitJob(jobID,jobs.STATUS_PAUSINGSEEDING,windowEnd);
if (Logging.jobs.isDebugEnabled())
{
Logging.jobs.debug("Un-waited (but still paused) job "+jobID);
}
break;
default:
break;
}
}
database.performCommit();
return;
}
catch (ManifoldCFException e)
{
database.signalRollback();
if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT)
{
if (Logging.perf.isDebugEnabled())
Logging.perf.debug("Aborted transaction resetting for restart: "+e.getMessage());
sleepAmt = getRandomAmount();
continue;
}
throw e;
}
catch (Error e)
{
database.signalRollback();
throw e;
}
finally
{
database.endTransaction();
sleepFor(sleepAmt);
}
}
}
/** Put active or paused jobs in wait state, if they've exceeded their window.
*@param currentTime is the current time in milliseconds since epoch.
*@param waitList is filled in with the set of job ID's that were put into a wait state.
*/
public void waitJobs(long currentTime, ArrayList waitList)
throws ManifoldCFException
{
// This method assesses jobs that are ACTIVE or PAUSED to see if they should be
// converted to ACTIVEWAIT or PAUSEDWAIT. This would happen if the current time exceeded
// the value in the "windowend" field for the job.
//
database.beginTransaction();
try
{
// First, query the appropriate fields of all jobs.
StringBuilder sb = new StringBuilder("SELECT ");
ArrayList list = new ArrayList();
sb.append(jobs.idField).append(",")
.append(jobs.statusField)
.append(" FROM ").append(jobs.getTableName()).append(" WHERE ")
.append(database.buildConjunctionClause(list,new ClauseDescription[]{
new MultiClause(jobs.statusField,new Object[]{
jobs.statusToString(jobs.STATUS_ACTIVE),
jobs.statusToString(jobs.STATUS_ACTIVESEEDING),
jobs.statusToString(jobs.STATUS_ACTIVE_UNINSTALLED),
jobs.statusToString(jobs.STATUS_ACTIVESEEDING_UNINSTALLED),
jobs.statusToString(jobs.STATUS_ACTIVE_NOOUTPUT),
jobs.statusToString(jobs.STATUS_ACTIVESEEDING_NOOUTPUT),
jobs.statusToString(jobs.STATUS_ACTIVE_NEITHER),
jobs.statusToString(jobs.STATUS_ACTIVESEEDING_NEITHER),
jobs.statusToString(jobs.STATUS_PAUSED),
jobs.statusToString(jobs.STATUS_PAUSEDSEEDING)})})).append(" AND ")
.append(jobs.windowEndField).append("<? FOR UPDATE");
list.add(new Long(currentTime));
IResultSet set = database.performQuery(sb.toString(),list,null,null);
int i = 0;
while (i < set.getRowCount())
{
IResultRow row = set.getRow(i++);
Long jobID = (Long)row.getValue(jobs.idField);
waitList.add(jobID);
int status = jobs.stringToStatus(row.getValue(jobs.statusField).toString());
// Make the job wait.
switch (status)
{
case Jobs.STATUS_ACTIVE:
case Jobs.STATUS_ACTIVE_UNINSTALLED:
case Jobs.STATUS_ACTIVE_NOOUTPUT:
case Jobs.STATUS_ACTIVE_NEITHER:
jobs.waitJob(jobID,Jobs.STATUS_ACTIVEWAITING);
if (Logging.jobs.isDebugEnabled())
{
Logging.jobs.debug("Job "+jobID+" now in 'wait' state due to window end");
}
break;
case Jobs.STATUS_ACTIVESEEDING:
case Jobs.STATUS_ACTIVESEEDING_UNINSTALLED:
case Jobs.STATUS_ACTIVESEEDING_NOOUTPUT:
case Jobs.STATUS_ACTIVESEEDING_NEITHER:
jobs.waitJob(jobID,Jobs.STATUS_ACTIVEWAITINGSEEDING);
if (Logging.jobs.isDebugEnabled())
{
Logging.jobs.debug("Job "+jobID+" now in 'wait' state due to window end");
}
break;
case Jobs.STATUS_PAUSED:
jobs.waitJob(jobID,Jobs.STATUS_PAUSEDWAIT);
if (Logging.jobs.isDebugEnabled())
{
Logging.jobs.debug("Job "+jobID+" now in 'wait paused' state due to window end");
}
break;
case Jobs.STATUS_PAUSEDSEEDING:
jobs.waitJob(jobID,Jobs.STATUS_PAUSEDWAITSEEDING);
if (Logging.jobs.isDebugEnabled())
{
Logging.jobs.debug("Job "+jobID+" now in 'wait paused' state due to window end");
}
break;
case Jobs.STATUS_PAUSING:
jobs.waitJob(jobID,Jobs.STATUS_PAUSINGWAITING);
if (Logging.jobs.isDebugEnabled())
{
Logging.jobs.debug("Job "+jobID+" now in 'wait paused' state due to window end");
}
break;
case Jobs.STATUS_PAUSINGSEEDING:
jobs.waitJob(jobID,Jobs.STATUS_PAUSINGWAITINGSEEDING);
if (Logging.jobs.isDebugEnabled())
{
Logging.jobs.debug("Job "+jobID+" now in 'wait paused' state due to window end");
}
break;
default:
break;
}
}
}
catch (ManifoldCFException e)
{
database.signalRollback();
throw e;
}
catch (Error e)
{
database.signalRollback();
throw e;
}
finally
{
database.endTransaction();
}
}
/** Reset job schedule. This re-evaluates whether the job should be started now. This method would typically
* be called after a job's scheduling window has been changed.
*@param jobID is the job identifier.
*/
public void resetJobSchedule(Long jobID)
throws ManifoldCFException
{
// Note: This is problematic; the expected behavior is for the job to start if "we are within the window",
// but not to start if the transition to active status was long enough ago.
// Since there's no "right" way to do this, do nothing for now.
// This explicitly did NOT work - it caused the job to refire every time it was saved.
// jobs.updateLastTime(jobID,0L);
}
/** Check if the specified job parameters have a 'hit' within the specified interval.
*@param startTime is the start time.
*@param currentTimestamp is the end time.
*@param daysOfWeek is the enumerated days of the week, or null.
*@param daysOfMonth is the enumerated days of the month, or null.
*@param months is the enumerated months, or null.
*@param years is the enumerated years, or null.
*@param hours is the enumerated hours, or null.
  *@param minutes is the enumerated minutes, or null.
  *@param timezone is the name of the timezone to use, or null for the server default.
  *@param duration is the window duration in milliseconds, or null if there is no window limit.
*@return null if there is NO hit within the interval; otherwise the actual time of the hit in milliseconds
* from epoch is returned.
*/
protected static Long checkTimeMatch(long startTime, long currentTimestamp,
EnumeratedValues daysOfWeek,
EnumeratedValues daysOfMonth,
EnumeratedValues months,
EnumeratedValues years,
EnumeratedValues hours,
EnumeratedValues minutes,
String timezone,
Long duration)
{
// What we do here is start with the previous timestamp, and advance until we
// either encounter a match, or we exceed the current timestamp.
Calendar c;
if (timezone == null)
{
c = Calendar.getInstance();
}
else
{
c = Calendar.getInstance(TimeZone.getTimeZone(timezone));
}
// Get the current starting time
c.setTimeInMillis(startTime);
// If there's a duration value, we can't match unless we're within the window.
// That means we find a match, and then we verify that the end time is greater than the currenttimestamp.
// If not, we move on (by incrementing)
// The main loop works off of the calendar and these values.
while (c.getTimeInMillis() < currentTimestamp)
{
// Round up to the nearest minute, unless at 0 already
int x = c.get(Calendar.MILLISECOND);
if (x != c.getMinimum(Calendar.MILLISECOND))
{
int amtToAdd = c.getLeastMaximum(Calendar.MILLISECOND)+1-x;
if (amtToAdd < 1)
amtToAdd = 1;
c.add(Calendar.MILLISECOND,amtToAdd);
continue;
}
x = c.get(Calendar.SECOND);
if (x != c.getMinimum(Calendar.SECOND))
{
int amtToAdd = c.getLeastMaximum(Calendar.SECOND)+1-x;
if (amtToAdd < 1)
amtToAdd = 1;
c.add(Calendar.SECOND,amtToAdd);
continue;
}
boolean startedToCareYet = false;
x = c.get(Calendar.MINUTE);
// If we care about minutes, round up, otherwise go to the 0 value
if (minutes == null)
{
if (x != c.getMinimum(Calendar.MINUTE))
{
int amtToAdd = c.getLeastMaximum(Calendar.MINUTE)+1-x;
if (amtToAdd < 1)
amtToAdd = 1;
c.add(Calendar.MINUTE,amtToAdd);
continue;
}
}
else
{
// See if it is a legit value.
if (!minutes.checkValue(x-c.getMinimum(Calendar.MINUTE)))
{
// Advance to next legit value
// We could be clever, but we just advance one
c.add(Calendar.MINUTE,1);
continue;
}
startedToCareYet = true;
}
// Hours
x = c.get(Calendar.HOUR_OF_DAY);
if (hours == null)
{
if (!startedToCareYet && x != c.getMinimum(Calendar.HOUR_OF_DAY))
{
int amtToAdd = c.getLeastMaximum(Calendar.HOUR_OF_DAY)+1-x;
if (amtToAdd < 1)
amtToAdd = 1;
c.add(Calendar.HOUR_OF_DAY,amtToAdd);
continue;
}
}
else
{
if (!hours.checkValue(x-c.getMinimum(Calendar.HOUR_OF_DAY)))
{
// next hour
c.add(Calendar.HOUR_OF_DAY,1);
continue;
}
startedToCareYet = true;
}
// Days of month and days of week are at the same level;
// these advance concurrently. However, if NEITHER is specified, and nothing
// earlier was, then we do the 1st of the month.
x = c.get(Calendar.DAY_OF_WEEK);
if (daysOfWeek != null)
{
if (!daysOfWeek.checkValue(x-c.getMinimum(Calendar.DAY_OF_WEEK)))
{
// next day
c.add(Calendar.DAY_OF_WEEK,1);
continue;
}
startedToCareYet = true;
}
x = c.get(Calendar.DAY_OF_MONTH);
if (daysOfMonth == null)
{
// If nothing is specified but the month or the year, do it on the 1st.
if (!startedToCareYet && x != c.getMinimum(Calendar.DAY_OF_MONTH))
{
// Move as rapidly as possible towards the first of the month. But in no case, increment
// less than one day.
int amtToAdd = c.getLeastMaximum(Calendar.DAY_OF_MONTH)+1-x;
if (amtToAdd < 1)
amtToAdd = 1;
c.add(Calendar.DAY_OF_MONTH,amtToAdd);
continue;
}
}
else
{
if (!daysOfMonth.checkValue(x-c.getMinimum(Calendar.DAY_OF_MONTH)))
{
// next day
c.add(Calendar.DAY_OF_MONTH,1);
continue;
}
startedToCareYet = true;
}
x = c.get(Calendar.MONTH);
if (months == null)
{
if (!startedToCareYet && x != c.getMinimum(Calendar.MONTH))
{
int amtToAdd = c.getLeastMaximum(Calendar.MONTH)+1-x;
if (amtToAdd < 1)
amtToAdd = 1;
c.add(Calendar.MONTH,amtToAdd);
continue;
}
}
else
{
if (!months.checkValue(x-c.getMinimum(Calendar.MONTH)))
{
c.add(Calendar.MONTH,1);
continue;
}
startedToCareYet = true;
}
x = c.get(Calendar.YEAR);
if (years != null)
{
if (!years.checkValue(x))
{
c.add(Calendar.YEAR,1);
continue;
}
startedToCareYet = true;
}
// Looks like a match.
// Last check is to be sure we are in the window, if any. If we are outside the window,
// must skip forward.
if (duration != null && c.getTimeInMillis() + duration.longValue() <= currentTimestamp)
{
c.add(Calendar.MILLISECOND,c.getLeastMaximum(Calendar.MILLISECOND));
continue;
}
return new Long(c.getTimeInMillis());
}
return null;
}
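  /* Illustrative use of checkTimeMatch (hypothetical values; the EnumeratedValues argument
   * "twoAmOnly" is shown schematically rather than constructed, and "lastStartTime" stands
   * in for the job's lasttime field):
   *
   *   // "Fire at 2 AM local time, within a 4-hour window":
   *   Long match = checkTimeMatch(lastStartTime + 1, System.currentTimeMillis(),
   *     null,        // any day of week
   *     null,        // any day of month
   *     null,        // any month
   *     null,        // any year
   *     twoAmOnly,   // an EnumeratedValues containing only hour 2
   *     null,        // unspecified minutes round to minute 0
   *     null,        // default timezone
   *     new Long(4L * 60L * 60L * 1000L));  // 4-hour duration
   *
   * A null result means no firing time falls in the interval; otherwise the result is the
   * matched start time in milliseconds since epoch, and (when a duration is supplied) some
   * part of the window is guaranteed to extend past the current time.
   */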
/** Manually start a job. The specified job will be run REGARDLESS of the timed windows, and
* will not cease until complete. If the job is already running, this operation will assure that
* the job does not pause when its window ends. The job can be manually paused, or manually aborted.
*@param jobID is the ID of the job to start.
*/
public void manualStart(Long jobID)
throws ManifoldCFException
{
manualStart(jobID,false);
}
/** Manually start a job. The specified job will be run REGARDLESS of the timed windows, and
* will not cease until complete. If the job is already running, this operation will assure that
* the job does not pause when its window ends. The job can be manually paused, or manually aborted.
*@param jobID is the ID of the job to start.
*@param requestMinimum is true if a minimal job run is requested.
*/
public void manualStart(Long jobID, boolean requestMinimum)
throws ManifoldCFException
{
database.beginTransaction();
try
{
// First, query the appropriate fields of all jobs.
StringBuilder sb = new StringBuilder("SELECT ");
ArrayList list = new ArrayList();
sb.append(jobs.statusField)
.append(" FROM ").append(jobs.getTableName()).append(" WHERE ")
.append(database.buildConjunctionClause(list,new ClauseDescription[]{
new UnitaryClause(jobs.idField,jobID)}))
.append(" FOR UPDATE");
IResultSet set = database.performQuery(sb.toString(),list,null,null);
if (set.getRowCount() < 1)
throw new ManifoldCFException("No such job: "+jobID);
IResultRow row = set.getRow(0);
int status = jobs.stringToStatus(row.getValue(jobs.statusField).toString());
if (status != Jobs.STATUS_INACTIVE)
throw new ManifoldCFException("Job "+jobID+" is already running");
IJobDescription jobDescription = jobs.load(jobID,true);
if (Logging.jobs.isDebugEnabled())
{
Logging.jobs.debug("Manually starting job "+jobID);
}
      // Start this job, but with no end time.
jobs.startJob(jobID,null,requestMinimum);
jobQueue.clearFailTimes(jobID);
if (Logging.jobs.isDebugEnabled())
{
Logging.jobs.debug("Manual job start signal for job "+jobID+" successfully sent");
}
}
catch (ManifoldCFException e)
{
database.signalRollback();
throw e;
}
catch (Error e)
{
database.signalRollback();
throw e;
}
finally
{
database.endTransaction();
}
}
/** Note job delete started.
*@param jobID is the job id.
*@param startTime is the job delete start time.
*/
public void noteJobDeleteStarted(Long jobID, long startTime)
throws ManifoldCFException
{
jobs.noteJobDeleteStarted(jobID,startTime);
if (Logging.jobs.isDebugEnabled())
Logging.jobs.debug("Job "+jobID+" delete is now started");
}
/** Note job started.
*@param jobID is the job id.
*@param startTime is the job start time.
*/
public void noteJobStarted(Long jobID, long startTime)
throws ManifoldCFException
{
jobs.noteJobStarted(jobID,startTime);
if (Logging.jobs.isDebugEnabled())
Logging.jobs.debug("Job "+jobID+" is now started");
}
/** Note job seeded.
*@param jobID is the job id.
*@param seedTime is the job seed time.
*/
public void noteJobSeeded(Long jobID, long seedTime)
throws ManifoldCFException
{
jobs.noteJobSeeded(jobID,seedTime);
if (Logging.jobs.isDebugEnabled())
Logging.jobs.debug("Job "+jobID+" has been successfully reseeded");
}
/** Prepare for a delete scan.
*@param jobID is the job id.
*/
public void prepareDeleteScan(Long jobID)
throws ManifoldCFException
{
// No special treatment needed for hopcount or carrydown, since these all get deleted at once
// at the end of the job delete process.
TrackerClass.notePrecommit();
jobQueue.prepareDeleteScan(jobID);
TrackerClass.noteCommit();
}
/** Prepare a job to be run.
* This method is called regardless of the details of the job; what differs is only the flags that are passed in.
* The code inside will determine the appropriate procedures.
* (This method replaces prepareFullScan() and prepareIncrementalScan(). )
*@param jobID is the job id.
*@param legalLinkTypes are the link types allowed for the job.
*@param hopcountMethod describes how to handle deletions for hopcount purposes.
*@param connectorModel is the model used by the connector for the job.
*@param continuousJob is true if the job is a continuous one.
*@param fromBeginningOfTime is true if the job is running starting from time 0.
*@param requestMinimum is true if the minimal amount of work is requested for the job run.
*/
public void prepareJobScan(Long jobID, String[] legalLinkTypes, int hopcountMethod,
int connectorModel, boolean continuousJob, boolean fromBeginningOfTime,
boolean requestMinimum)
throws ManifoldCFException
{
// (1) If the connector has MODEL_ADD_CHANGE_DELETE, then
// we let the connector run the show; there's no purge phase, and therefore the
// documents are left in a COMPLETED state if they don't show up in the list
// of seeds that require the attention of the connector. However, we do need to
// preload the queue with all the existing documents, if there was any change to the
// specification information (which will mean that fromBeginningOfTime is set).
//
// (2) If the connector has MODEL_ALL, then it's a full crawl no matter what, so
// we do a full scan initialization.
//
// (3) If the connector has some other model, we look at the start time. A start
// time of 0 implies a full scan, while any other start time implies an incremental
// scan.
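    //
    // In summary, the dispatch below works out to:
    //   MODEL_ADD_CHANGE_DELETE          -> queueAllExisting() if fromBeginningOfTime, else nothing
    //   MODEL_CHAINED_ADD_CHANGE_DELETE  -> queueAllExisting() if fromBeginningOfTime, else preparePartialScan()
    //   minimal run, not MODEL_ALL, and not fromBeginningOfTime
    //                                    -> preparePartialScan() for the chained-add models, else nothing
    //   non-continuous, not MODEL_PARTIAL, and (MODEL_ALL or fromBeginningOfTime)
    //                                    -> prepareFullScan()
    //   everything else                  -> prepareIncrementalScan()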
// Complete connector model is told everything, so no delete phase.
if (connectorModel == IRepositoryConnector.MODEL_ADD_CHANGE_DELETE)
{
if (fromBeginningOfTime)
queueAllExisting(jobID,legalLinkTypes);
return;
}
// If the connector model is complete via chaining, then we just need to make
// sure discovery works to queue the changes.
if (connectorModel == IRepositoryConnector.MODEL_CHAINED_ADD_CHANGE_DELETE)
{
if (fromBeginningOfTime)
queueAllExisting(jobID,legalLinkTypes);
else
jobQueue.preparePartialScan(jobID);
return;
}
// Similarly, minimal crawl attempts no delete phase unless the connector explicitly forbids it, or unless
// the job criteria have changed.
if (requestMinimum && connectorModel != IRepositoryConnector.MODEL_ALL && !fromBeginningOfTime)
{
// If it is a chained model, do the partial prep.
if (connectorModel == IRepositoryConnector.MODEL_CHAINED_ADD ||
connectorModel == IRepositoryConnector.MODEL_CHAINED_ADD_CHANGE)
jobQueue.preparePartialScan(jobID);
return;
}
if (!continuousJob && connectorModel != IRepositoryConnector.MODEL_PARTIAL &&
(connectorModel == IRepositoryConnector.MODEL_ALL || fromBeginningOfTime))
prepareFullScan(jobID,legalLinkTypes,hopcountMethod);
else
jobQueue.prepareIncrementalScan(jobID);
}
/** Queue all existing.
*@param jobID is the job id.
*@param legalLinkTypes are the link types allowed for the job.
*/
protected void queueAllExisting(Long jobID, String[] legalLinkTypes)
throws ManifoldCFException
{
while (true)
{
long sleepAmt = 0L;
database.beginTransaction();
try
{
if (legalLinkTypes.length > 0)
{
jobQueue.reactivateHopcountRemovedRecords(jobID);
}
jobQueue.queueAllExisting(jobID);
TrackerClass.notePrecommit();
database.performCommit();
TrackerClass.noteCommit();
break;
}
catch (ManifoldCFException e)
{
database.signalRollback();
TrackerClass.noteRollback();
if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT)
{
if (Logging.perf.isDebugEnabled())
Logging.perf.debug("Aborted transaction during queueAllExisting: "+e.getMessage());
sleepAmt = getRandomAmount();
continue;
}
throw e;
}
catch (Error e)
{
database.signalRollback();
TrackerClass.noteRollback();
throw e;
}
finally
{
database.endTransaction();
sleepFor(sleepAmt);
}
}
}
/** Prepare for a full scan.
*@param jobID is the job id.
*@param legalLinkTypes are the link types allowed for the job.
*@param hopcountMethod describes how to handle deletions for hopcount purposes.
*/
protected void prepareFullScan(Long jobID, String[] legalLinkTypes, int hopcountMethod)
throws ManifoldCFException
{
while (true)
{
long sleepAmt = 0L;
// Since we delete documents here, we need to manage the hopcount part of the world too.
database.beginTransaction(database.TRANSACTION_SERIALIZED);
try
{
// Delete the documents we have never fetched, including any hopcount records we've calculated.
if (legalLinkTypes.length > 0)
{
ArrayList list = new ArrayList();
String query = database.buildConjunctionClause(list,new ClauseDescription[]{
new MultiClause("t99."+jobQueue.statusField,new Object[]{
jobQueue.statusToString(jobQueue.STATUS_PENDING),
jobQueue.statusToString(jobQueue.STATUS_HOPCOUNTREMOVED)})});
hopCount.deleteMatchingDocuments(jobID,legalLinkTypes,jobQueue.getTableName()+" t99",
"t99."+jobQueue.docHashField,"t99."+jobQueue.jobIDField,
query,list,
hopcountMethod);
}
jobQueue.prepareFullScan(jobID);
TrackerClass.notePrecommit();
database.performCommit();
TrackerClass.noteCommit();
break;
}
catch (ManifoldCFException e)
{
database.signalRollback();
TrackerClass.noteRollback();
if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT)
{
if (Logging.perf.isDebugEnabled())
Logging.perf.debug("Aborted transaction preparing full scan: "+e.getMessage());
sleepAmt = getRandomAmount();
continue;
}
throw e;
}
catch (Error e)
{
database.signalRollback();
TrackerClass.noteRollback();
throw e;
}
finally
{
database.endTransaction();
sleepFor(sleepAmt);
}
}
}
/** Manually abort a running job. The job will be permanently stopped, and will not run again until
* automatically started based on schedule, or manually started.
*@param jobID is the job to abort.
*/
public void manualAbort(Long jobID)
throws ManifoldCFException
{
// Just whack status back to "INACTIVE". The active documents will continue to be processed until done,
// but that's fine. There will be no finishing stage, obviously.
if (Logging.jobs.isDebugEnabled())
{
Logging.jobs.debug("Manually aborting job "+jobID);
}
while (true)
{
long sleepAmt = 0L;
database.beginTransaction();
try
{
jobs.abortJob(jobID,null);
database.performCommit();
break;
}
catch (ManifoldCFException e)
{
database.signalRollback();
if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT)
{
if (Logging.perf.isDebugEnabled())
Logging.perf.debug("Aborted transaction aborting job: "+e.getMessage());
sleepAmt = getRandomAmount();
continue;
}
throw e;
}
catch (Error e)
{
database.signalRollback();
throw e;
}
finally
{
database.endTransaction();
sleepFor(sleepAmt);
}
}
if (Logging.jobs.isDebugEnabled())
{
Logging.jobs.debug("Job "+jobID+" abort signal successfully sent");
}
}
/** Manually restart a running job. The job will be stopped and restarted. Any schedule affinity will be lost,
* until the job finishes on its own.
*@param jobID is the job to abort.
*@param requestMinimum is true if a minimal job run is requested.
*/
public void manualAbortRestart(Long jobID, boolean requestMinimum)
throws ManifoldCFException
{
if (Logging.jobs.isDebugEnabled())
{
Logging.jobs.debug("Manually restarting job "+jobID);
}
while (true)
{
long sleepAmt = 0L;
database.beginTransaction();
try
{
jobs.abortRestartJob(jobID,requestMinimum);
database.performCommit();
break;
}
catch (ManifoldCFException e)
{
database.signalRollback();
if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT)
{
if (Logging.perf.isDebugEnabled())
Logging.perf.debug("Aborted transaction restarting job: "+e.getMessage());
sleepAmt = getRandomAmount();
continue;
}
throw e;
}
catch (Error e)
{
database.signalRollback();
throw e;
}
finally
{
database.endTransaction();
sleepFor(sleepAmt);
}
}
if (Logging.jobs.isDebugEnabled())
{
Logging.jobs.debug("Job "+jobID+" restart signal successfully sent");
}
}
/** Manually restart a running job. The job will be stopped and restarted. Any schedule affinity will be lost,
* until the job finishes on its own.
*@param jobID is the job to abort.
*/
public void manualAbortRestart(Long jobID)
throws ManifoldCFException
{
manualAbortRestart(jobID,false);
}
/** Abort a running job due to a fatal error condition.
*@param jobID is the job to abort.
*@param errorText is the error text.
*@return true if this is the first logged abort request for this job.
*/
public boolean errorAbort(Long jobID, String errorText)
throws ManifoldCFException
{
// Just whack status back to "INACTIVE". The active documents will continue to be processed until done,
// but that's fine. There will be no finishing stage, obviously.
if (Logging.jobs.isDebugEnabled())
{
Logging.jobs.debug("Aborting job "+jobID+" due to error '"+errorText+"'");
}
boolean rval;
while (true)
{
long sleepAmt = 0L;
database.beginTransaction();
try
{
rval = jobs.abortJob(jobID,errorText);
database.performCommit();
break;
}
catch (ManifoldCFException e)
{
database.signalRollback();
if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT)
{
if (Logging.perf.isDebugEnabled())
Logging.perf.debug("Aborted transaction aborting job: "+e.getMessage());
sleepAmt = getRandomAmount();
continue;
}
throw e;
}
catch (Error e)
{
database.signalRollback();
throw e;
}
finally
{
database.endTransaction();
sleepFor(sleepAmt);
}
}
if (rval && Logging.jobs.isDebugEnabled())
{
Logging.jobs.debug("Job "+jobID+" abort signal successfully sent");
}
return rval;
}
/** Pause a job.
*@param jobID is the job identifier to pause.
*/
public void pauseJob(Long jobID)
throws ManifoldCFException
{
if (Logging.jobs.isDebugEnabled())
{
Logging.jobs.debug("Manually pausing job "+jobID);
}
while (true)
{
long sleepAmt = 0L;
database.beginTransaction();
try
{
jobs.pauseJob(jobID);
database.performCommit();
break;
}
catch (ManifoldCFException e)
{
database.signalRollback();
if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT)
{
if (Logging.perf.isDebugEnabled())
Logging.perf.debug("Aborted transaction pausing job: "+e.getMessage());
sleepAmt = getRandomAmount();
continue;
}
throw e;
}
catch (Error e)
{
database.signalRollback();
throw e;
}
finally
{
database.endTransaction();
sleepFor(sleepAmt);
}
}
if (Logging.jobs.isDebugEnabled())
{
Logging.jobs.debug("Job "+jobID+" successfully paused");
}
}
/** Restart a paused job.
*@param jobID is the job identifier to restart.
*/
public void restartJob(Long jobID)
throws ManifoldCFException
{
if (Logging.jobs.isDebugEnabled())
{
Logging.jobs.debug("Manually restarting paused job "+jobID);
}
while (true)
{
long sleepAmt = 0L;
database.beginTransaction();
try
{
jobs.restartJob(jobID);
jobQueue.clearFailTimes(jobID);
database.performCommit();
break;
}
catch (ManifoldCFException e)
{
database.signalRollback();
if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT)
{
if (Logging.perf.isDebugEnabled())
Logging.perf.debug("Aborted transaction restarting pausing job: "+e.getMessage());
sleepAmt = getRandomAmount();
continue;
}
throw e;
}
catch (Error e)
{
database.signalRollback();
throw e;
}
finally
{
database.endTransaction();
sleepFor(sleepAmt);
}
}
if (Logging.jobs.isDebugEnabled())
{
Logging.jobs.debug("Job "+jobID+" successfully restarted");
}
}
/** Get the list of jobs that are ready for seeding.
  *@param currentTime is the current time in milliseconds since epoch.
  *@return jobs that are active and are running in adaptive mode.  These will be seeded
* based on what the connector says should be added to the queue.
*/
public JobSeedingRecord[] getJobsReadyForSeeding(long currentTime)
throws ManifoldCFException
{
while (true)
{
long sleepAmt = 0L;
database.beginTransaction();
try
{
// Do the query
StringBuilder sb = new StringBuilder("SELECT ");
ArrayList list = new ArrayList();
sb.append(jobs.idField).append(",")
.append(jobs.lastCheckTimeField).append(",")
.append(jobs.reseedIntervalField)
.append(" FROM ").append(jobs.getTableName()).append(" WHERE ")
.append(database.buildConjunctionClause(list,new ClauseDescription[]{
new UnitaryClause(jobs.statusField,jobs.statusToString(jobs.STATUS_ACTIVE))})).append(" AND ")
.append(jobs.typeField).append("=? AND ")
.append("(").append(jobs.reseedTimeField).append(" IS NULL OR ").append(jobs.reseedTimeField).append("<=?)")
.append(" FOR UPDATE");
list.add(jobs.typeToString(jobs.TYPE_CONTINUOUS));
list.add(new Long(currentTime));
IResultSet set = database.performQuery(sb.toString(),list,null,null);
// Update them all
JobSeedingRecord[] rval = new JobSeedingRecord[set.getRowCount()];
int i = 0;
while (i < rval.length)
{
IResultRow row = set.getRow(i);
Long jobID = (Long)row.getValue(jobs.idField);
Long x = (Long)row.getValue(jobs.lastCheckTimeField);
long synchTime = 0;
if (x != null)
synchTime = x.longValue();
Long r = (Long)row.getValue(jobs.reseedIntervalField);
Long reseedTime;
if (r != null)
reseedTime = new Long(currentTime + r.longValue());
else
reseedTime = null;
// Mark status of job as "active/seeding". Special status is needed so that abort
// will not complete until seeding is completed.
jobs.writeStatus(jobID,jobs.STATUS_ACTIVESEEDING,reseedTime);
if (Logging.jobs.isDebugEnabled())
{
Logging.jobs.debug("Marked job "+jobID+" for seeding");
}
rval[i] = new JobSeedingRecord(jobID,synchTime);
i++;
}
database.performCommit();
return rval;
}
catch (ManifoldCFException e)
{
database.signalRollback();
if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT)
{
if (Logging.perf.isDebugEnabled())
Logging.perf.debug("Aborted getting jobs ready for seeding: "+e.getMessage());
sleepAmt = getRandomAmount();
continue;
}
throw e;
}
catch (Error e)
{
database.signalRollback();
throw e;
}
finally
{
database.endTransaction();
sleepFor(sleepAmt);
}
}
}
/** Get the list of jobs that are ready for deletion.
*@return jobs that were in the "readyfordelete" state.
*/
public JobDeleteRecord[] getJobsReadyForDelete()
throws ManifoldCFException
{
while (true)
{
long sleepAmt = 0L;
database.beginTransaction();
try
{
// Do the query
StringBuilder sb = new StringBuilder("SELECT ");
ArrayList list = new ArrayList();
sb.append(jobs.idField).append(" FROM ").append(jobs.getTableName()).append(" WHERE ")
.append(database.buildConjunctionClause(list,new ClauseDescription[]{
new UnitaryClause(jobs.statusField,jobs.statusToString(jobs.STATUS_READYFORDELETE))}))
.append(" FOR UPDATE");
IResultSet set = database.performQuery(sb.toString(),list,null,null);
// Update them all
JobDeleteRecord[] rval = new JobDeleteRecord[set.getRowCount()];
int i = 0;
while (i < rval.length)
{
IResultRow row = set.getRow(i);
Long jobID = (Long)row.getValue(jobs.idField);
// Mark status of job as "starting delete"
jobs.writeStatus(jobID,jobs.STATUS_DELETESTARTINGUP);
if (Logging.jobs.isDebugEnabled())
{
Logging.jobs.debug("Marked job "+jobID+" for delete startup");
}
rval[i] = new JobDeleteRecord(jobID);
i++;
}
database.performCommit();
return rval;
}
catch (ManifoldCFException e)
{
database.signalRollback();
if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT)
{
if (Logging.perf.isDebugEnabled())
Logging.perf.debug("Aborted getting jobs ready for startup: "+e.getMessage());
sleepAmt = getRandomAmount();
continue;
}
throw e;
}
catch (Error e)
{
database.signalRollback();
throw e;
}
finally
{
database.endTransaction();
sleepFor(sleepAmt);
}
}
}
/** Get the list of jobs that are ready for startup.
*@return jobs that were in the "readyforstartup" state. These will be marked as being in the "starting up" state.
*/
public JobStartRecord[] getJobsReadyForStartup()
throws ManifoldCFException
{
while (true)
{
long sleepAmt = 0L;
database.beginTransaction();
try
{
// Do the query
StringBuilder sb = new StringBuilder("SELECT ");
ArrayList list = new ArrayList();
sb.append(jobs.idField).append(",")
.append(jobs.lastCheckTimeField).append(",")
.append(jobs.statusField)
.append(" FROM ").append(jobs.getTableName()).append(" WHERE ")
.append(database.buildConjunctionClause(list,new ClauseDescription[]{
new MultiClause(jobs.statusField,new Object[]{
jobs.statusToString(jobs.STATUS_READYFORSTARTUP),
jobs.statusToString(jobs.STATUS_READYFORSTARTUPMINIMAL)})}))
.append(" FOR UPDATE");
IResultSet set = database.performQuery(sb.toString(),list,null,null);
// Update them all
JobStartRecord[] rval = new JobStartRecord[set.getRowCount()];
int i = 0;
while (i < rval.length)
{
IResultRow row = set.getRow(i);
Long jobID = (Long)row.getValue(jobs.idField);
Long x = (Long)row.getValue(jobs.lastCheckTimeField);
int status = jobs.stringToStatus((String)row.getValue(jobs.statusField));
boolean requestMinimum = (status == jobs.STATUS_READYFORSTARTUPMINIMAL);
long synchTime = 0;
if (x != null)
synchTime = x.longValue();
// Mark status of job as "starting"
jobs.writeStatus(jobID,requestMinimum?jobs.STATUS_STARTINGUPMINIMAL:jobs.STATUS_STARTINGUP);
if (Logging.jobs.isDebugEnabled())
{
Logging.jobs.debug("Marked job "+jobID+" for startup");
}
rval[i] = new JobStartRecord(jobID,synchTime,requestMinimum);
i++;
}
database.performCommit();
return rval;
}
catch (ManifoldCFException e)
{
database.signalRollback();
if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT)
{
if (Logging.perf.isDebugEnabled())
Logging.perf.debug("Aborted getting jobs ready for startup: "+e.getMessage());
sleepAmt = getRandomAmount();
continue;
}
throw e;
}
catch (Error e)
{
database.signalRollback();
throw e;
}
finally
{
database.endTransaction();
sleepFor(sleepAmt);
}
}
}
/** Inactivate a job, from the notification state.
*@param jobID is the ID of the job to inactivate.
*/
public void inactivateJob(Long jobID)
throws ManifoldCFException
{
    // While there is no known flow that can cause a job to be in the wrong state when this gets called,
    // as a precaution the work is done in a transaction and the job's state is checked first.
while (true)
{
long sleepAmt = 0L;
database.beginTransaction();
try
{
// Check job status
StringBuilder sb = new StringBuilder("SELECT ");
ArrayList list = new ArrayList();
sb.append(jobs.statusField).append(" FROM ").append(jobs.getTableName()).append(" WHERE ")
.append(database.buildConjunctionClause(list,new ClauseDescription[]{
new UnitaryClause(jobs.idField,jobID)}))
.append(" FOR UPDATE");
IResultSet set = database.performQuery(sb.toString(),list,null,null);
if (set.getRowCount() == 0)
throw new ManifoldCFException("No such job: "+jobID);
IResultRow row = set.getRow(0);
int status = jobs.stringToStatus((String)row.getValue(jobs.statusField));
switch (status)
{
case Jobs.STATUS_NOTIFYINGOFCOMPLETION:
jobs.notificationComplete(jobID);
break;
default:
throw new ManifoldCFException("Unexpected job status: "+Integer.toString(status));
}
database.performCommit();
return;
}
catch (ManifoldCFException e)
{
database.signalRollback();
if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT)
{
if (Logging.perf.isDebugEnabled())
Logging.perf.debug("Aborted clearing notification state for job: "+e.getMessage());
sleepAmt = getRandomAmount();
continue;
}
throw e;
}
catch (Error e)
{
database.signalRollback();
throw e;
}
finally
{
database.endTransaction();
sleepFor(sleepAmt);
}
}
}
/** Reset a job starting for delete back to "ready for delete"
* state.
*@param jobID is the job id.
*/
public void resetStartDeleteJob(Long jobID)
throws ManifoldCFException
{
while (true)
{
long sleepAmt = 0L;
database.beginTransaction();
try
{
// Check job status
StringBuilder sb = new StringBuilder("SELECT ");
ArrayList list = new ArrayList();
sb.append(jobs.statusField).append(" FROM ").append(jobs.getTableName()).append(" WHERE ")
.append(database.buildConjunctionClause(list,new ClauseDescription[]{
new UnitaryClause(jobs.idField,jobID)}))
.append(" FOR UPDATE");
IResultSet set = database.performQuery(sb.toString(),list,null,null);
if (set.getRowCount() == 0)
throw new ManifoldCFException("No such job: "+jobID);
IResultRow row = set.getRow(0);
int status = jobs.stringToStatus((String)row.getValue(jobs.statusField));
switch (status)
{
case Jobs.STATUS_DELETESTARTINGUP:
if (Logging.jobs.isDebugEnabled())
Logging.jobs.debug("Setting job "+jobID+" back to 'ReadyForDelete' state");
// Set the state of the job back to "ReadyForStartup"
jobs.writeStatus(jobID,jobs.STATUS_READYFORDELETE);
break;
default:
throw new ManifoldCFException("Unexpected job status: "+Integer.toString(status));
}
database.performCommit();
return;
}
catch (ManifoldCFException e)
{
database.signalRollback();
if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT)
{
if (Logging.perf.isDebugEnabled())
Logging.perf.debug("Aborted resetting start delete job: "+e.getMessage());
sleepAmt = getRandomAmount();
continue;
}
throw e;
}
catch (Error e)
{
database.signalRollback();
throw e;
}
finally
{
database.endTransaction();
sleepFor(sleepAmt);
}
}
}
/** Reset a job that is notifying back to "ready for notify"
* state.
*@param jobID is the job id.
*/
public void resetNotifyJob(Long jobID)
throws ManifoldCFException
{
while (true)
{
long sleepAmt = 0L;
database.beginTransaction();
try
{
// Check job status
StringBuilder sb = new StringBuilder("SELECT ");
ArrayList list = new ArrayList();
sb.append(jobs.statusField).append(" FROM ").append(jobs.getTableName()).append(" WHERE ")
.append(database.buildConjunctionClause(list,new ClauseDescription[]{
new UnitaryClause(jobs.idField,jobID)}))
.append(" FOR UPDATE");
IResultSet set = database.performQuery(sb.toString(),list,null,null);
if (set.getRowCount() == 0)
throw new ManifoldCFException("No such job: "+jobID);
IResultRow row = set.getRow(0);
int status = jobs.stringToStatus((String)row.getValue(jobs.statusField));
switch (status)
{
case Jobs.STATUS_NOTIFYINGOFCOMPLETION:
if (Logging.jobs.isDebugEnabled())
Logging.jobs.debug("Setting job "+jobID+" back to 'ReadyForNotify' state");
// Set the state of the job back to "ReadyForNotify"
jobs.writeStatus(jobID,jobs.STATUS_READYFORNOTIFY);
break;
default:
throw new ManifoldCFException("Unexpected job status: "+Integer.toString(status));
}
database.performCommit();
return;
}
catch (ManifoldCFException e)
{
database.signalRollback();
if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT)
{
if (Logging.perf.isDebugEnabled())
Logging.perf.debug("Aborted resetting notify job: "+e.getMessage());
sleepAmt = getRandomAmount();
continue;
}
throw e;
}
catch (Error e)
{
database.signalRollback();
throw e;
}
finally
{
database.endTransaction();
sleepFor(sleepAmt);
}
}
}
/** Reset a starting job back to "ready for startup" state.
*@param jobID is the job id.
*/
public void resetStartupJob(Long jobID)
throws ManifoldCFException
{
while (true)
{
long sleepAmt = 0L;
database.beginTransaction();
try
{
// Check job status
StringBuilder sb = new StringBuilder("SELECT ");
ArrayList list = new ArrayList();
sb.append(jobs.statusField).append(" FROM ").append(jobs.getTableName()).append(" WHERE ")
.append(database.buildConjunctionClause(list,new ClauseDescription[]{
new UnitaryClause(jobs.idField,jobID)}))
.append(" FOR UPDATE");
IResultSet set = database.performQuery(sb.toString(),list,null,null);
if (set.getRowCount() == 0)
throw new ManifoldCFException("No such job: "+jobID);
IResultRow row = set.getRow(0);
int status = jobs.stringToStatus((String)row.getValue(jobs.statusField));
switch (status)
{
case Jobs.STATUS_STARTINGUP:
if (Logging.jobs.isDebugEnabled())
Logging.jobs.debug("Setting job "+jobID+" back to 'ReadyForStartup' state");
// Set the state of the job back to "ReadyForStartup"
jobs.writeStatus(jobID,jobs.STATUS_READYFORSTARTUP);
break;
case Jobs.STATUS_STARTINGUPMINIMAL:
if (Logging.jobs.isDebugEnabled())
Logging.jobs.debug("Setting job "+jobID+" back to 'ReadyForStartupMinimal' state");
// Set the state of the job back to "ReadyForStartupMinimal"
jobs.writeStatus(jobID,jobs.STATUS_READYFORSTARTUPMINIMAL);
break;
case Jobs.STATUS_ABORTINGSTARTINGUP:
case Jobs.STATUS_ABORTINGSTARTINGUPMINIMAL:
if (Logging.jobs.isDebugEnabled())
Logging.jobs.debug("Setting job "+jobID+" to 'Aborting' state");
jobs.writeStatus(jobID,jobs.STATUS_ABORTING);
break;
case Jobs.STATUS_ABORTINGSTARTINGUPFORRESTART:
if (Logging.jobs.isDebugEnabled())
Logging.jobs.debug("Setting job "+jobID+" to 'AbortingForRestart' state");
jobs.writeStatus(jobID,jobs.STATUS_ABORTINGFORRESTART);
break;
case Jobs.STATUS_ABORTINGSTARTINGUPFORRESTARTMINIMAL:
if (Logging.jobs.isDebugEnabled())
Logging.jobs.debug("Setting job "+jobID+" to 'AbortingForRestartMinimal' state");
jobs.writeStatus(jobID,jobs.STATUS_ABORTINGFORRESTARTMINIMAL);
break;
case Jobs.STATUS_READYFORSTARTUP:
case Jobs.STATUS_READYFORSTARTUPMINIMAL:
case Jobs.STATUS_ABORTING:
case Jobs.STATUS_ABORTINGFORRESTART:
case Jobs.STATUS_ABORTINGFORRESTARTMINIMAL:
// ok
break;
default:
throw new ManifoldCFException("Unexpected job status: "+Integer.toString(status));
}
database.performCommit();
return;
}
catch (ManifoldCFException e)
{
database.signalRollback();
if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT)
{
if (Logging.perf.isDebugEnabled())
Logging.perf.debug("Aborted resetting startup job: "+e.getMessage());
sleepAmt = getRandomAmount();
continue;
}
throw e;
}
catch (Error e)
{
database.signalRollback();
throw e;
}
finally
{
database.endTransaction();
sleepFor(sleepAmt);
}
}
}
/** Reset a seeding job back to "active" state.
*@param jobID is the job id.
*/
public void resetSeedJob(Long jobID)
throws ManifoldCFException
{
while (true)
{
long sleepAmt = 0L;
database.beginTransaction();
try
{
// Check job status
StringBuilder sb = new StringBuilder("SELECT ");
ArrayList list = new ArrayList();
sb.append(jobs.statusField).append(" FROM ").append(jobs.getTableName()).append(" WHERE ")
.append(database.buildConjunctionClause(list,new ClauseDescription[]{
new UnitaryClause(jobs.idField,jobID)}))
.append(" FOR UPDATE");
IResultSet set = database.performQuery(sb.toString(),list,null,null);
if (set.getRowCount() == 0)
throw new ManifoldCFException("No such job: "+jobID);
IResultRow row = set.getRow(0);
int status = jobs.stringToStatus((String)row.getValue(jobs.statusField));
switch (status)
{
case Jobs.STATUS_ACTIVESEEDING_UNINSTALLED:
if (Logging.jobs.isDebugEnabled())
Logging.jobs.debug("Setting job "+jobID+" back to 'Active_Uninstalled' state");
// Set the state of the job back to "Active"
jobs.writeStatus(jobID,jobs.STATUS_ACTIVE_UNINSTALLED);
break;
case Jobs.STATUS_ACTIVESEEDING_NOOUTPUT:
if (Logging.jobs.isDebugEnabled())
Logging.jobs.debug("Setting job "+jobID+" back to 'Active_NoOutput' state");
// Set the state of the job back to "Active"
jobs.writeStatus(jobID,jobs.STATUS_ACTIVE_NOOUTPUT);
break;
case Jobs.STATUS_ACTIVESEEDING_NEITHER:
if (Logging.jobs.isDebugEnabled())
Logging.jobs.debug("Setting job "+jobID+" back to 'Active_Neither' state");
// Set the state of the job back to "Active"
jobs.writeStatus(jobID,jobs.STATUS_ACTIVE_NEITHER);
break;
case Jobs.STATUS_ACTIVESEEDING:
if (Logging.jobs.isDebugEnabled())
Logging.jobs.debug("Setting job "+jobID+" back to 'Active' state");
// Set the state of the job back to "Active"
jobs.writeStatus(jobID,jobs.STATUS_ACTIVE);
break;
case Jobs.STATUS_ACTIVEWAITSEEDING:
if (Logging.jobs.isDebugEnabled())
Logging.jobs.debug("Setting job "+jobID+" back to 'ActiveWait' state");
// Set the state of the job back to "Active"
jobs.writeStatus(jobID,jobs.STATUS_ACTIVEWAIT);
break;
case Jobs.STATUS_PAUSEDSEEDING:
if (Logging.jobs.isDebugEnabled())
Logging.jobs.debug("Setting job "+jobID+" back to 'Paused' state");
// Set the state of the job back to "Active"
jobs.writeStatus(jobID,jobs.STATUS_PAUSED);
break;
case Jobs.STATUS_PAUSEDWAITSEEDING:
if (Logging.jobs.isDebugEnabled())
Logging.jobs.debug("Setting job "+jobID+" back to 'PausedWait' state");
// Set the state of the job back to "Active"
jobs.writeStatus(jobID,jobs.STATUS_PAUSEDWAIT);
break;
case Jobs.STATUS_ABORTINGSEEDING:
if (Logging.jobs.isDebugEnabled())
Logging.jobs.debug("Setting job "+jobID+" back to 'Aborting' state");
// Set the state of the job back to "Active"
jobs.writeStatus(jobID,jobs.STATUS_ABORTING);
break;
case Jobs.STATUS_ABORTINGFORRESTARTSEEDING:
if (Logging.jobs.isDebugEnabled())
Logging.jobs.debug("Setting job "+jobID+" back to 'AbortingForRestart' state");
// Set the state of the job back to "Active"
jobs.writeStatus(jobID,jobs.STATUS_ABORTINGFORRESTART);
break;
case Jobs.STATUS_ABORTINGFORRESTARTSEEDINGMINIMAL:
if (Logging.jobs.isDebugEnabled())
Logging.jobs.debug("Setting job "+jobID+" back to 'AbortingForRestartMinimal' state");
// Set the state of the job back to "Active"
jobs.writeStatus(jobID,jobs.STATUS_ABORTINGFORRESTARTMINIMAL);
break;
case Jobs.STATUS_ABORTING:
case Jobs.STATUS_ABORTINGFORRESTART:
case Jobs.STATUS_ABORTINGFORRESTARTMINIMAL:
case Jobs.STATUS_ACTIVE:
case Jobs.STATUS_ACTIVE_UNINSTALLED:
case Jobs.STATUS_ACTIVE_NOOUTPUT:
case Jobs.STATUS_ACTIVE_NEITHER:
case Jobs.STATUS_PAUSED:
case Jobs.STATUS_ACTIVEWAIT:
case Jobs.STATUS_PAUSEDWAIT:
// ok
break;
default:
throw new ManifoldCFException("Unexpected job status: "+Integer.toString(status));
}
database.performCommit();
return;
}
catch (ManifoldCFException e)
{
database.signalRollback();
if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT)
{
if (Logging.perf.isDebugEnabled())
Logging.perf.debug("Aborted resetting seeding job: "+e.getMessage());
sleepAmt = getRandomAmount();
continue;
}
throw e;
}
catch (Error e)
{
database.signalRollback();
throw e;
}
finally
{
database.endTransaction();
sleepFor(sleepAmt);
}
}
}
/** Delete jobs in need of being deleted (which are marked "ready for delete").
* This method is meant to be called periodically to perform delete processing on jobs.
*/
public void deleteJobsReadyForDelete()
throws ManifoldCFException
{
while (true)
{
long sleepAmt = 0L;
// This method must find only jobs that have nothing hanging around in their jobqueue that represents an ingested
// document. Any jobqueue entries which are in a state to interfere with the delete will be cleaned up by other
// threads, so eventually a job will become eligible. This happens when there are no records that have an ingested
// status: complete, purgatory, being-cleaned, being-deleted, or pending purgatory.
database.beginTransaction();
try
{
// The original query was:
//
// SELECT id FROM jobs t0 WHERE status='D' AND NOT EXISTS(SELECT 'x' FROM jobqueue t1 WHERE t0.id=t1.jobid AND
// t1.status IN ('C', 'F', 'G'))
//
// However, this did not work well with Postgres when the tables got big. So I revised things to do the following multi-stage process:
// (1) The query should be broken up, such that n queries are done:
// (a) the first one should get all candidate jobs (those that have the right state)
// (b) there should be a query for each job of roughly this form: SELECT id FROM jobqueue WHERE jobid=xxx AND status IN (...) LIMIT 1
// This will work way better than postgresql currently works, because neither the cost-based analysis nor the actual NOT clause seem to allow
// early exit!!
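        //
        // Stage (b) below thus becomes, for each candidate job (status codes shown symbolically;
        // the real values come from jobQueue.statusToString()):
        //   SELECT id FROM jobqueue WHERE jobid=? AND status IN (<eligible-for-delete>,<being-deleted>) LIMIT 1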
// Do the first query, getting the candidate jobs to be considered
StringBuilder sb = new StringBuilder("SELECT ");
ArrayList list = new ArrayList();
sb.append(jobs.idField).append(" FROM ").append(jobs.getTableName()).append(" WHERE ")
.append(database.buildConjunctionClause(list,new ClauseDescription[]{
new UnitaryClause(jobs.statusField,jobs.statusToString(jobs.STATUS_DELETING))}))
.append(" FOR UPDATE");
IResultSet set = database.performQuery(sb.toString(),list,null,null);
// Now, loop through this list. For each one, verify that it's okay to delete it
int i = 0;
while (i < set.getRowCount())
{
IResultRow row = set.getRow(i++);
Long jobID = (Long)row.getValue(jobs.idField);
list.clear();
sb = new StringBuilder("SELECT ");
sb.append(jobQueue.idField).append(" FROM ").append(jobQueue.getTableName()).append(" WHERE ")
.append(database.buildConjunctionClause(list,new ClauseDescription[]{
new UnitaryClause(jobQueue.jobIDField,jobID),
new MultiClause(jobQueue.statusField,new Object[]{
jobQueue.statusToString(jobQueue.STATUS_ELIGIBLEFORDELETE),
jobQueue.statusToString(jobQueue.STATUS_BEINGDELETED)})}))
.append(" ").append(database.constructOffsetLimitClause(0,1));
IResultSet confirmSet = database.performQuery(sb.toString(),list,null,null,1,null);
if (confirmSet.getRowCount() > 0)
continue;
ManifoldCF.noteConfigurationChange();
// Remove documents from job queue
jobQueue.deleteAllJobRecords(jobID);
// Remove carrydowns for the job
carryDown.deleteOwner(jobID);
// Nothing is in a critical section - so this should be OK.
hopCount.deleteOwner(jobID);
jobs.delete(jobID);
if (Logging.jobs.isDebugEnabled())
{
Logging.jobs.debug("Removed job "+jobID);
}
}
database.performCommit();
return;
}
catch (ManifoldCFException e)
{
database.signalRollback();
if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT)
{
if (Logging.perf.isDebugEnabled())
Logging.perf.debug("Aborted deleting jobs ready for delete: "+e.getMessage());
sleepAmt = getRandomAmount();
continue;
}
throw e;
}
catch (Error e)
{
database.signalRollback();
throw e;
}
finally
{
database.endTransaction();
sleepFor(sleepAmt);
}
}
}
/** Put all eligible jobs in the "shutting down" state.
*/
public void finishJobs()
throws ManifoldCFException
{
while (true)
{
long sleepAmt = 0L;
// The jobs we should transition:
// - are in one of the active states (active, active-wait, or an uninstalled/no-output variant)
// - have no ACTIVE, ACTIVENEEDRESCAN, PENDING, ACTIVEPURGATORY, ACTIVENEEDRESCANPURGATORY, or PENDINGPURGATORY records
database.beginTransaction();
try
{
// The query I used to emit was:
// SELECT id FROM jobs t0 WHERE t0.status='A' AND NOT EXISTS(SELECT 'x' FROM jobqueue t1 WHERE
// t0.id=t1.jobid AND t1.status IN ('A','P','F','G'))
// This did not get along well with PostgreSQL, so instead the work is now split up:
// (a) the first query gets all candidate jobs (those that have the right state)
// (b) there is then a query for each job of roughly this form: SELECT id FROM jobqueue WHERE jobid=xxx AND status IN (...) LIMIT 1
// This works far better, because with the single query neither PostgreSQL's cost-based analysis nor the NOT EXISTS clause
// seems to allow an early exit.
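// The per-job probe below succeeds whenever any outstanding (active or pending) jobqueue row still
// exists, in which case the job is skipped and left running.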
// Do the first query, getting the candidate jobs to be considered
StringBuilder sb = new StringBuilder("SELECT ");
ArrayList list = new ArrayList();
sb.append(jobs.idField).append(" FROM ").append(jobs.getTableName()).append(" WHERE ")
.append(database.buildConjunctionClause(list,new ClauseDescription[]{
new MultiClause(jobs.statusField,new Object[]{
jobs.statusToString(jobs.STATUS_ACTIVE),
jobs.statusToString(jobs.STATUS_ACTIVEWAIT),
jobs.statusToString(jobs.STATUS_ACTIVE_UNINSTALLED),
jobs.statusToString(jobs.STATUS_ACTIVE_NOOUTPUT),
jobs.statusToString(jobs.STATUS_ACTIVE_NEITHER)})}))
.append(" FOR UPDATE");
IResultSet set = database.performQuery(sb.toString(),list,null,null);
int i = 0;
while (i < set.getRowCount())
{
IResultRow row = set.getRow(i++);
Long jobID = (Long)row.getValue(jobs.idField);
// Check to be sure the job is a candidate for shutdown
sb = new StringBuilder("SELECT ");
list.clear();
sb.append(jobQueue.idField).append(" FROM ").append(jobQueue.getTableName()).append(" WHERE ")
.append(database.buildConjunctionClause(list,new ClauseDescription[]{
new UnitaryClause(jobQueue.jobIDField,jobID),
new MultiClause(jobQueue.statusField,new Object[]{
jobQueue.statusToString(jobQueue.STATUS_ACTIVE),
jobQueue.statusToString(jobQueue.STATUS_ACTIVENEEDRESCAN),
jobQueue.statusToString(jobQueue.STATUS_PENDING),
jobQueue.statusToString(jobQueue.STATUS_ACTIVEPURGATORY),
jobQueue.statusToString(jobQueue.STATUS_ACTIVENEEDRESCANPURGATORY),
jobQueue.statusToString(jobQueue.STATUS_PENDINGPURGATORY)})}))
.append(" ").append(database.constructOffsetLimitClause(0,1));
IResultSet confirmSet = database.performQuery(sb.toString(),list,null,null,1,null);
if (confirmSet.getRowCount() > 0)
continue;
// Mark status of job as "finishing"
jobs.writeStatus(jobID,jobs.STATUS_SHUTTINGDOWN);
if (Logging.jobs.isDebugEnabled())
{
Logging.jobs.debug("Marked job "+jobID+" for shutdown");
}
}
database.performCommit();
return;
}
catch (ManifoldCFException e)
{
database.signalRollback();
if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT)
{
if (Logging.perf.isDebugEnabled())
Logging.perf.debug("Aborted finishing jobs: "+e.getMessage());
sleepAmt = getRandomAmount();
continue;
}
throw e;
}
catch (Error e)
{
database.signalRollback();
throw e;
}
finally
{
database.endTransaction();
sleepFor(sleepAmt);
}
}
}
/** Find the list of jobs that need to have their connectors notified of job completion.
*@return the IDs of jobs that need their output connectors notified in order to become inactive.
*/
public JobNotifyRecord[] getJobsReadyForInactivity()
throws ManifoldCFException
{
while (true)
{
long sleepAmt = 0L;
database.beginTransaction();
try
{
// Do the query
StringBuilder sb = new StringBuilder("SELECT ");
ArrayList list = new ArrayList();
sb.append(jobs.idField).append(" FROM ").append(jobs.getTableName()).append(" WHERE ")
.append(database.buildConjunctionClause(list,new ClauseDescription[]{
new UnitaryClause(jobs.statusField,jobs.statusToString(jobs.STATUS_READYFORNOTIFY))}))
.append(" FOR UPDATE");
IResultSet set = database.performQuery(sb.toString(),list,null,null);
// Return them all
JobNotifyRecord[] rval = new JobNotifyRecord[set.getRowCount()];
int i = 0;
while (i < rval.length)
{
IResultRow row = set.getRow(i);
Long jobID = (Long)row.getValue(jobs.idField);
// Mark status of job as "starting delete"
jobs.writeStatus(jobID,jobs.STATUS_NOTIFYINGOFCOMPLETION);
if (Logging.jobs.isDebugEnabled())
{
Logging.jobs.debug("Found job "+jobID+" in need of notification");
}
rval[i++] = new JobNotifyRecord(jobID);
}
database.performCommit();
return rval;
}
catch (ManifoldCFException e)
{
database.signalRollback();
if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT)
{
if (Logging.perf.isDebugEnabled())
Logging.perf.debug("Aborted getting jobs ready for notify: "+e.getMessage());
sleepAmt = getRandomAmount();
continue;
}
throw e;
}
catch (Error e)
{
database.signalRollback();
throw e;
}
finally
{
database.endTransaction();
sleepFor(sleepAmt);
}
}
}
/** Complete the sequence that resumes jobs, either from a pause or from a scheduling window
* wait. The logic will restore the job to an active state (many possibilities depending on
* connector status), and will record the jobs that have been so modified.
*@param timestamp is the current time in milliseconds since epoch.
*@param modifiedJobs is filled in with the set of IJobDescription objects that were resumed.
*/
public void finishJobResumes(long timestamp, ArrayList modifiedJobs)
throws ManifoldCFException
{
// Do the first query, getting the candidate jobs to be considered
StringBuilder sb = new StringBuilder("SELECT ");
ArrayList list = new ArrayList();
sb.append(jobs.idField)
.append(" FROM ").append(jobs.getTableName()).append(" WHERE ")
.append(database.buildConjunctionClause(list,new ClauseDescription[]{
new MultiClause(jobs.statusField,new Object[]{
jobs.statusToString(jobs.STATUS_RESUMING),
jobs.statusToString(jobs.STATUS_RESUMINGSEEDING)
})}));
IResultSet set = database.performQuery(sb.toString(),list,null,null);
int i = 0;
while (i < set.getRowCount())
{
IResultRow row = set.getRow(i++);
Long jobID = (Long)row.getValue(jobs.idField);
// There are no secondary checks that need to be made; just resume
IJobDescription jobDesc = jobs.load(jobID,true);
modifiedJobs.add(jobDesc);
jobs.finishResumeJob(jobID,timestamp);
if (Logging.jobs.isDebugEnabled())
{
Logging.jobs.debug("Resumed job "+jobID);
}
}
}
/** Complete the sequence that stops jobs, either for abort, pause, or because of a scheduling
* window. The logic will move the job to its next state (INACTIVE, PAUSED, ACTIVEWAIT),
* and will record the jobs that have been so modified.
*@param timestamp is the current time in milliseconds since epoch.
*@param modifiedJobs is filled in with the set of IJobDescription objects that were stopped.
*/
public void finishJobStops(long timestamp, ArrayList modifiedJobs)
throws ManifoldCFException
{
// The query I used to emit was:
// SELECT id FROM jobs t0 WHERE t0.status='X' AND NOT EXISTS(SELECT 'x' FROM jobqueue t1 WHERE
// t0.id=t1.jobid AND t1.status IN ('A','F'))
// Now the query is broken up, in the same two-stage way as above, so that PostgreSQL behaves more efficiently.
// Do the first query, getting the candidate jobs to be considered
StringBuilder sb = new StringBuilder("SELECT ");
ArrayList list = new ArrayList();
sb.append(jobs.idField)
.append(" FROM ").append(jobs.getTableName()).append(" WHERE ")
.append(database.buildConjunctionClause(list,new ClauseDescription[]{
new MultiClause(jobs.statusField,new Object[]{
jobs.statusToString(jobs.STATUS_ABORTING),
jobs.statusToString(jobs.STATUS_ABORTINGFORRESTART),
jobs.statusToString(jobs.STATUS_ABORTINGFORRESTARTMINIMAL),
jobs.statusToString(jobs.STATUS_PAUSING),
jobs.statusToString(jobs.STATUS_PAUSINGSEEDING),
jobs.statusToString(jobs.STATUS_ACTIVEWAITING),
jobs.statusToString(jobs.STATUS_ACTIVEWAITINGSEEDING),
jobs.statusToString(jobs.STATUS_PAUSINGWAITING),
jobs.statusToString(jobs.STATUS_PAUSINGWAITINGSEEDING)
})}));
IResultSet set = database.performQuery(sb.toString(),list,null,null);
int i = 0;
while (i < set.getRowCount())
{
IResultRow row = set.getRow(i++);
Long jobID = (Long)row.getValue(jobs.idField);
sb = new StringBuilder("SELECT ");
list.clear();
sb.append(jobQueue.idField).append(" FROM ").append(jobQueue.getTableName()).append(" WHERE ")
.append(database.buildConjunctionClause(list,new ClauseDescription[]{
new UnitaryClause(jobQueue.jobIDField,jobID),
new MultiClause(jobQueue.statusField,new Object[]{
jobQueue.statusToString(jobQueue.STATUS_ACTIVE),
jobQueue.statusToString(jobQueue.STATUS_ACTIVEPURGATORY),
jobQueue.statusToString(jobQueue.STATUS_ACTIVENEEDRESCAN),
jobQueue.statusToString(jobQueue.STATUS_ACTIVENEEDRESCANPURGATORY)})}))
.append(" ").append(database.constructOffsetLimitClause(0,1));
IResultSet confirmSet = database.performQuery(sb.toString(),list,null,null,1,null);
if (confirmSet.getRowCount() > 0)
continue;
// All the job's documents need to have their docpriority set to null, to clear dead wood out of the docpriority index.
// See CONNECTORS-290.
// We do this BEFORE updating the job state.
jobQueue.clearDocPriorities(jobID);
IJobDescription jobDesc = jobs.load(jobID,true);
modifiedJobs.add(jobDesc);
jobs.finishStopJob(jobID,timestamp);
if (Logging.jobs.isDebugEnabled())
{
Logging.jobs.debug("Stopped job "+jobID);
}
}
}
/** Reset eligible jobs either back to the "inactive" state, or make them active again. The
* latter will occur if the cleanup phase of the job generated more pending documents.
*
* This method is used to pick up all jobs in the shutting down state
* whose purgatory or being-cleaned records have been all processed.
*
*@param currentTime is the current time in milliseconds since epoch.
*@param resetJobs is filled in with the set of IJobDescription objects that were reset.
*/
public void resetJobs(long currentTime, ArrayList resetJobs)
throws ManifoldCFException
{
// Query for all jobs that fulfill the criteria
// The query used to look like:
//
// SELECT id FROM jobs t0 WHERE status='D' AND NOT EXISTS(SELECT 'x' FROM jobqueue t1 WHERE
// t0.id=t1.jobid AND t1.status='P')
//
// Now, the query is broken up, for performance
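// Two probes follow per candidate job: the first confirms that cleanup has finished (no PURGATORY or
// BEINGCLEANED rows remain), and the second chooses between returning the job to the active state and
// finishing it, depending on whether any PENDING or PENDINGPURGATORY rows were created during cleanup.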
// Do the first query, getting the candidate jobs to be considered
StringBuilder sb = new StringBuilder("SELECT ");
ArrayList list = new ArrayList();
sb.append(jobs.idField).append(" FROM ").append(jobs.getTableName()).append(" WHERE ")
.append(database.buildConjunctionClause(list,new ClauseDescription[]{
new UnitaryClause(jobs.statusField,jobs.statusToString(jobs.STATUS_SHUTTINGDOWN))}));
IResultSet set = database.performQuery(sb.toString(),list,null,null);
int i = 0;
while (i < set.getRowCount())
{
IResultRow row = set.getRow(i++);
Long jobID = (Long)row.getValue(jobs.idField);
// Check to be sure the job is a candidate for shutdown
sb = new StringBuilder("SELECT ");
list.clear();
sb.append(jobQueue.idField).append(" FROM ").append(jobQueue.getTableName()).append(" WHERE ")
.append(database.buildConjunctionClause(list,new ClauseDescription[]{
new UnitaryClause(jobQueue.jobIDField,jobID),
new MultiClause(jobQueue.statusField,new Object[]{
jobQueue.statusToString(jobQueue.STATUS_PURGATORY),
jobQueue.statusToString(jobQueue.STATUS_BEINGCLEANED)})}))
.append(" ").append(database.constructOffsetLimitClause(0,1));
IResultSet confirmSet = database.performQuery(sb.toString(),list,null,null,1,null);
if (confirmSet.getRowCount() > 0)
continue;
// The shutting-down phase is complete. However, we need to check if there are any outstanding
// PENDING or PENDINGPURGATORY records before we can decide what to do.
sb = new StringBuilder("SELECT ");
list.clear();
sb.append(jobQueue.idField).append(" FROM ").append(jobQueue.getTableName()).append(" WHERE ")
.append(database.buildConjunctionClause(list,new ClauseDescription[]{
new UnitaryClause(jobQueue.jobIDField,jobID),
new MultiClause(jobQueue.statusField,new Object[]{
jobQueue.statusToString(jobQueue.STATUS_PENDING),
jobQueue.statusToString(jobQueue.STATUS_PENDINGPURGATORY)})}))
.append(" ").append(database.constructOffsetLimitClause(0,1));
confirmSet = database.performQuery(sb.toString(),list,null,null,1,null);
if (confirmSet.getRowCount() > 0)
{
// This job needs to re-enter the active state. Make that happen.
jobs.returnJobToActive(jobID);
if (Logging.jobs.isDebugEnabled())
{
Logging.jobs.debug("Job "+jobID+" is re-entering active state");
}
}
else
{
// This job should be marked as finished.
IJobDescription jobDesc = jobs.load(jobID,true);
resetJobs.add(jobDesc);
jobs.finishJob(jobID,currentTime);
if (Logging.jobs.isDebugEnabled())
{
Logging.jobs.debug("Job "+jobID+" now completed");
}
}
}
}
// Status reports
/** Get the status of a job.
*@return the status object for the specified job.
*/
@Override
public JobStatus getStatus(Long jobID)
throws ManifoldCFException
{
return getStatus(jobID,true);
}
/** Get a list of all jobs, and their status information.
*@return an ordered array of job status objects.
*/
@Override
public JobStatus[] getAllStatus()
throws ManifoldCFException
{
return getAllStatus(true);
}
/** Get a list of running jobs. This is for status reporting.
*@return an array of the job status objects.
*/
@Override
public JobStatus[] getRunningJobs()
throws ManifoldCFException
{
return getRunningJobs(true);
}
/** Get a list of completed jobs, and their statistics.
*@return an array of the job status objects.
*/
@Override
public JobStatus[] getFinishedJobs()
throws ManifoldCFException
{
return getFinishedJobs(true);
}
/** Get the status of a job.
*@param jobID is the job ID.
*@param includeCounts is true if document counts should be included.
*@return the status object for the specified job.
*/
public JobStatus getStatus(Long jobID, boolean includeCounts)
throws ManifoldCFException
{
return getStatus(jobID, includeCounts, Integer.MAX_VALUE);
}
/** Get a list of all jobs, and their status information.
*@param includeCounts is true if document counts should be included.
*@return an ordered array of job status objects.
*/
public JobStatus[] getAllStatus(boolean includeCounts)
throws ManifoldCFException
{
return getAllStatus(includeCounts, Integer.MAX_VALUE);
}
/** Get a list of running jobs. This is for status reporting.
*@param includeCounts is true if document counts should be included.
*@return an array of the job status objects.
*/
public JobStatus[] getRunningJobs(boolean includeCounts)
throws ManifoldCFException
{
return getRunningJobs(includeCounts, Integer.MAX_VALUE);
}
/** Get a list of completed jobs, and their statistics.
*@param includeCounts is true if document counts should be included.
*@return an array of the job status objects.
*/
public JobStatus[] getFinishedJobs(boolean includeCounts)
throws ManifoldCFException
{
return getFinishedJobs(includeCounts, Integer.MAX_VALUE);
}
/** Get the status of a job.
*@param jobID is the job ID.
*@param includeCounts is true if document counts should be included.
*@param maxCount is the maximum number of documents we want to count for each status.
*@return the status object for the specified job.
*/
@Override
public JobStatus getStatus(Long jobID, boolean includeCounts, int maxCount)
throws ManifoldCFException
{
ArrayList list = new ArrayList();
String whereClause = Jobs.idField+"=?";
list.add(jobID);
JobStatus[] records = makeJobStatus(whereClause,list,includeCounts,maxCount);
if (records.length == 0)
return null;
return records[0];
}
/** Get a list of all jobs, and their status information.
*@param includeCounts is true if document counts should be included.
*@param maxCount is the maximum number of documents we want to count for each status.
*@return an ordered array of job status objects.
*/
public JobStatus[] getAllStatus(boolean includeCounts, int maxCount)
throws ManifoldCFException
{
return makeJobStatus(null,null,includeCounts,maxCount);
}
/** Get a list of running jobs. This is for status reporting.
*@param includeCounts is true if document counts should be included.
*@param maxCount is the maximum number of documents we want to count for each status.
*@return an array of the job status objects.
*/
@Override
public JobStatus[] getRunningJobs(boolean includeCounts, int maxCount)
throws ManifoldCFException
{
ArrayList whereParams = new ArrayList();
String whereClause = database.buildConjunctionClause(whereParams,new ClauseDescription[]{
new MultiClause(Jobs.statusField,new Object[]{
Jobs.statusToString(Jobs.STATUS_ACTIVE),
Jobs.statusToString(Jobs.STATUS_ACTIVESEEDING),
Jobs.statusToString(Jobs.STATUS_ACTIVE_UNINSTALLED),
Jobs.statusToString(Jobs.STATUS_ACTIVESEEDING_UNINSTALLED),
Jobs.statusToString(Jobs.STATUS_ACTIVE_NOOUTPUT),
Jobs.statusToString(Jobs.STATUS_ACTIVESEEDING_NOOUTPUT),
Jobs.statusToString(Jobs.STATUS_ACTIVE_NEITHER),
Jobs.statusToString(Jobs.STATUS_ACTIVESEEDING_NEITHER),
Jobs.statusToString(Jobs.STATUS_PAUSED),
Jobs.statusToString(Jobs.STATUS_PAUSEDSEEDING),
Jobs.statusToString(Jobs.STATUS_ACTIVEWAIT),
Jobs.statusToString(Jobs.STATUS_ACTIVEWAITSEEDING),
Jobs.statusToString(Jobs.STATUS_PAUSEDWAIT),
Jobs.statusToString(Jobs.STATUS_PAUSEDWAITSEEDING),
Jobs.statusToString(Jobs.STATUS_PAUSING),
Jobs.statusToString(Jobs.STATUS_PAUSINGSEEDING),
Jobs.statusToString(Jobs.STATUS_ACTIVEWAITING),
Jobs.statusToString(Jobs.STATUS_ACTIVEWAITINGSEEDING),
Jobs.statusToString(Jobs.STATUS_PAUSINGWAITING),
Jobs.statusToString(Jobs.STATUS_PAUSINGWAITINGSEEDING),
Jobs.statusToString(Jobs.STATUS_RESUMING),
Jobs.statusToString(Jobs.STATUS_RESUMINGSEEDING)
})});
return makeJobStatus(whereClause,whereParams,includeCounts,maxCount);
}
/** Get a list of completed jobs, and their statistics.
*@param includeCounts is true if document counts should be included.
*@param maxCount is the maximum number of documents we want to count for each status.
*@return an array of the job status objects.
*/
@Override
public JobStatus[] getFinishedJobs(boolean includeCounts, int maxCount)
throws ManifoldCFException
{
StringBuilder sb = new StringBuilder();
ArrayList whereParams = new ArrayList();
sb.append(database.buildConjunctionClause(whereParams,new ClauseDescription[]{
new UnitaryClause(Jobs.statusField,Jobs.statusToString(Jobs.STATUS_INACTIVE))})).append(" AND ")
.append(Jobs.endTimeField).append(" IS NOT NULL");
return makeJobStatus(sb.toString(),whereParams,includeCounts,maxCount);
}
// Protected methods and classes
/** Make a job status array from a query result.
*@param whereClause is the where clause (without the "WHERE") for the jobs we are interested in, or null for all jobs.
*@param whereParams are the parameters that go with the where clause, or null.
*@param includeCounts is true if document counts should be included.
*@param maxCount is the maximum number of documents we want to count for each status.
*@return the status array.
*/
protected JobStatus[] makeJobStatus(String whereClause, ArrayList whereParams, boolean includeCounts, int maxCount)
throws ManifoldCFException
{
IResultSet set = database.performQuery("SELECT t0."+
Jobs.idField+",t0."+
Jobs.descriptionField+",t0."+
Jobs.statusField+",t0."+
Jobs.startTimeField+",t0."+
Jobs.endTimeField+",t0."+
Jobs.errorField+
" FROM "+jobs.getTableName()+" t0 "+((whereClause==null)?"":(" WHERE "+whereClause))+" ORDER BY "+Jobs.descriptionField+" ASC",
whereParams,null,null);
// Build hashes of the per-job counts: set2 = total documents, set3 = outstanding documents, set4 = processed documents
Map<Long,Long> set2Hash = new HashMap<Long,Long>();
Map<Long,Long> set3Hash = new HashMap<Long,Long>();
Map<Long,Long> set4Hash = new HashMap<Long,Long>();
Map<Long,Boolean> set2Exact = new HashMap<Long,Boolean>();
Map<Long,Boolean> set3Exact = new HashMap<Long,Boolean>();
Map<Long,Boolean> set4Exact = new HashMap<Long,Boolean>();
if (includeCounts)
{
// If we are counting all of them anyway, do this via GROUP BY since it will be the fastest. But
// otherwise, fire off an individual query at a time.
if (maxCount == Integer.MAX_VALUE)
{
buildCountsUsingGroupBy(whereClause,whereParams,set2Hash,set3Hash,set4Hash,set2Exact,set3Exact,set4Exact);
}
else
{
// Check if the total matching jobqueue rows exceeds the limit. If not, we can still use the cheaper query.
StringBuilder sb = new StringBuilder("SELECT ");
ArrayList list = new ArrayList();
sb.append(database.constructCountClause(JobQueue.docHashField)).append(" AS doccount")
.append(" FROM ").append(jobQueue.getTableName()).append(" t1");
addWhereClause(sb,list,whereClause,whereParams,false);
sb.append(" ").append(database.constructOffsetLimitClause(0,maxCount+1,false));
IResultSet countResult = database.performQuery(sb.toString(),list,null,null);
if (countResult.getRowCount() > 0 && ((Long)countResult.getRow(0).getValue("doccount")).longValue() > maxCount)
{
// Too many items in queue; do it the hard way
buildCountsUsingIndividualQueries(whereClause,whereParams,maxCount,set2Hash,set3Hash,set4Hash,set2Exact,set3Exact,set4Exact);
}
else
{
// Cheap way should still work.
buildCountsUsingGroupBy(whereClause,whereParams,set2Hash,set3Hash,set4Hash,set2Exact,set3Exact,set4Exact);
}
}
}
JobStatus[] rval = new JobStatus[set.getRowCount()];
for (int i = 0; i < rval.length; i++)
{
IResultRow row = set.getRow(i);
Long jobID = (Long)row.getValue(Jobs.idField);
String description = row.getValue(Jobs.descriptionField).toString();
int status = Jobs.stringToStatus(row.getValue(Jobs.statusField).toString());
Long startTimeValue = (Long)row.getValue(Jobs.startTimeField);
long startTime = -1;
if (startTimeValue != null)
startTime = startTimeValue.longValue();
Long endTimeValue = (Long)row.getValue(Jobs.endTimeField);
long endTime = -1;
if (endTimeValue != null)
endTime = endTimeValue.longValue();
String errorText = (String)row.getValue(Jobs.errorField);
if (errorText != null && errorText.length() == 0)
errorText = null;
int rstatus = JobStatus.JOBSTATUS_NOTYETRUN;
switch (status)
{
case Jobs.STATUS_INACTIVE:
if (errorText != null)
rstatus = JobStatus.JOBSTATUS_ERROR;
else
{
if (startTime >= 0)
rstatus = JobStatus.JOBSTATUS_COMPLETED;
else
rstatus = JobStatus.JOBSTATUS_NOTYETRUN;
}
break;
case Jobs.STATUS_ACTIVE_UNINSTALLED:
case Jobs.STATUS_ACTIVESEEDING_UNINSTALLED:
case Jobs.STATUS_ACTIVE_NOOUTPUT:
case Jobs.STATUS_ACTIVESEEDING_NOOUTPUT:
case Jobs.STATUS_ACTIVE_NEITHER:
case Jobs.STATUS_ACTIVESEEDING_NEITHER:
rstatus = JobStatus.JOBSTATUS_RUNNING_UNINSTALLED;
break;
case Jobs.STATUS_ACTIVE:
case Jobs.STATUS_ACTIVESEEDING:
rstatus = JobStatus.JOBSTATUS_RUNNING;
break;
case Jobs.STATUS_SHUTTINGDOWN:
rstatus = JobStatus.JOBSTATUS_JOBENDCLEANUP;
break;
case Jobs.STATUS_READYFORNOTIFY:
case Jobs.STATUS_NOTIFYINGOFCOMPLETION:
rstatus = JobStatus.JOBSTATUS_JOBENDNOTIFICATION;
break;
case Jobs.STATUS_ABORTING:
case Jobs.STATUS_ABORTINGSEEDING:
case Jobs.STATUS_ABORTINGSTARTINGUP:
case Jobs.STATUS_ABORTINGSTARTINGUPMINIMAL:
rstatus = JobStatus.JOBSTATUS_ABORTING;
break;
case Jobs.STATUS_ABORTINGFORRESTART:
case Jobs.STATUS_ABORTINGFORRESTARTMINIMAL:
case Jobs.STATUS_ABORTINGFORRESTARTSEEDING:
case Jobs.STATUS_ABORTINGFORRESTARTSEEDINGMINIMAL:
case Jobs.STATUS_ABORTINGSTARTINGUPFORRESTART:
case Jobs.STATUS_ABORTINGSTARTINGUPFORRESTARTMINIMAL:
rstatus = JobStatus.JOBSTATUS_RESTARTING;
break;
case Jobs.STATUS_PAUSING:
case Jobs.STATUS_PAUSINGSEEDING:
case Jobs.STATUS_ACTIVEWAITING:
case Jobs.STATUS_ACTIVEWAITINGSEEDING:
case Jobs.STATUS_PAUSINGWAITING:
case Jobs.STATUS_PAUSINGWAITINGSEEDING:
rstatus = JobStatus.JOBSTATUS_STOPPING;
break;
case Jobs.STATUS_RESUMING:
case Jobs.STATUS_RESUMINGSEEDING:
rstatus = JobStatus.JOBSTATUS_RESUMING;
break;
case Jobs.STATUS_PAUSED:
case Jobs.STATUS_PAUSEDSEEDING:
rstatus = JobStatus.JOBSTATUS_PAUSED;
break;
case Jobs.STATUS_ACTIVEWAIT:
case Jobs.STATUS_ACTIVEWAITSEEDING:
rstatus = JobStatus.JOBSTATUS_WINDOWWAIT;
break;
case Jobs.STATUS_PAUSEDWAIT:
case Jobs.STATUS_PAUSEDWAITSEEDING:
rstatus = JobStatus.JOBSTATUS_PAUSED;
break;
case Jobs.STATUS_STARTINGUP:
case Jobs.STATUS_STARTINGUPMINIMAL:
case Jobs.STATUS_READYFORSTARTUP:
case Jobs.STATUS_READYFORSTARTUPMINIMAL:
rstatus = JobStatus.JOBSTATUS_STARTING;
break;
case Jobs.STATUS_DELETESTARTINGUP:
case Jobs.STATUS_READYFORDELETE:
case Jobs.STATUS_DELETING:
case Jobs.STATUS_DELETING_NOOUTPUT:
rstatus = JobStatus.JOBSTATUS_DESTRUCTING;
break;
default:
break;
}
Long set2Value = set2Hash.get(jobID);
Long set3Value = set3Hash.get(jobID);
Long set4Value = set4Hash.get(jobID);
Boolean set2ExactValue = set2Exact.get(jobID);
Boolean set3ExactValue = set3Exact.get(jobID);
Boolean set4ExactValue = set4Exact.get(jobID);
rval[i] = new JobStatus(jobID.toString(),description,rstatus,((set2Value==null)?0L:set2Value.longValue()),
((set3Value==null)?0L:set3Value.longValue()),
((set4Value==null)?0L:set4Value.longValue()),
((set2ExactValue==null)?true:set2ExactValue.booleanValue()),
((set3ExactValue==null)?true:set3ExactValue.booleanValue()),
((set4ExactValue==null)?true:set4ExactValue.booleanValue()),
startTime,endTime,errorText);
}
return rval;
}
protected static ClauseDescription buildOutstandingClause()
throws ManifoldCFException
{
return new MultiClause(JobQueue.statusField,new Object[]{
JobQueue.statusToString(JobQueue.STATUS_ACTIVE),
JobQueue.statusToString(JobQueue.STATUS_ACTIVENEEDRESCAN),
JobQueue.statusToString(JobQueue.STATUS_PENDING),
JobQueue.statusToString(JobQueue.STATUS_ACTIVEPURGATORY),
JobQueue.statusToString(JobQueue.STATUS_ACTIVENEEDRESCANPURGATORY),
JobQueue.statusToString(JobQueue.STATUS_PENDINGPURGATORY)});
}
protected static ClauseDescription buildProcessedClause()
throws ManifoldCFException
{
return new MultiClause(JobQueue.statusField,new Object[]{
JobQueue.statusToString(JobQueue.STATUS_COMPLETE),
JobQueue.statusToString(JobQueue.STATUS_UNCHANGED),
JobQueue.statusToString(JobQueue.STATUS_PURGATORY),
JobQueue.statusToString(JobQueue.STATUS_ACTIVEPURGATORY),
JobQueue.statusToString(JobQueue.STATUS_ACTIVENEEDRESCANPURGATORY),
JobQueue.statusToString(JobQueue.STATUS_PENDINGPURGATORY)});
}
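// Note that the two clauses above overlap: ACTIVEPURGATORY, ACTIVENEEDRESCANPURGATORY, and
// PENDINGPURGATORY rows count both as "outstanding" (still to be worked on) and as "processed"
// (a previous crawl already handled them), so the two counts are not disjoint.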
protected void buildCountsUsingIndividualQueries(String whereClause, ArrayList whereParams, int maxCount,
Map<Long,Long> set2Hash, Map<Long,Long> set3Hash, Map<Long,Long> set4Hash,
Map<Long,Boolean> set2Exact, Map<Long,Boolean> set3Exact, Map<Long,Boolean> set4Exact)
throws ManifoldCFException
{
// Fire off an individual query with a limit for each job
// First, get the list of jobs that we are interested in.
StringBuilder sb = new StringBuilder("SELECT ");
ArrayList list = new ArrayList();
sb.append(Jobs.idField).append(" FROM ").append(jobs.getTableName()).append(" t0");
if (whereClause != null)
{
sb.append(" WHERE ")
.append(whereClause);
if (whereParams != null)
list.addAll(whereParams);
}
IResultSet jobSet = database.performQuery(sb.toString(),list,null,null);
// Scan the set of jobs
for (int i = 0; i < jobSet.getRowCount(); i++)
{
IResultRow row = jobSet.getRow(i);
Long jobID = (Long)row.getValue(Jobs.idField);
// Now, for each job, fire off a separate, limited, query for each count we care about
sb = new StringBuilder("SELECT ");
list.clear();
sb.append(database.constructCountClause(JobQueue.docHashField)).append(" AS doccount")
.append(" FROM ").append(jobQueue.getTableName()).append(" WHERE ");
sb.append(database.buildConjunctionClause(list,new ClauseDescription[]{new UnitaryClause(JobQueue.jobIDField,jobID)}));
sb.append(" ").append(database.constructOffsetLimitClause(0,maxCount+1,false));
IResultSet totalSet = database.performQuery(sb.toString(),list,null,null);
if (totalSet.getRowCount() > 0)
{
long rowCount = ((Long)totalSet.getRow(0).getValue("doccount")).longValue();
if (rowCount > maxCount)
{
set2Hash.put(jobID,new Long(maxCount));
set2Exact.put(jobID,new Boolean(false));
}
else
{
set2Hash.put(jobID,new Long(rowCount));
set2Exact.put(jobID,new Boolean(true));
}
}
sb = new StringBuilder("SELECT ");
list.clear();
sb.append(database.constructCountClause(JobQueue.docHashField)).append(" AS doccount")
.append(" FROM ").append(jobQueue.getTableName()).append(" WHERE ");
sb.append(database.buildConjunctionClause(list,new ClauseDescription[]{new UnitaryClause(JobQueue.jobIDField,jobID)}));
sb.append(" AND ");
sb.append(database.buildConjunctionClause(list,new ClauseDescription[]{buildOutstandingClause()}));
sb.append(" ").append(database.constructOffsetLimitClause(0,maxCount+1,false));
IResultSet outstandingSet = database.performQuery(sb.toString(),list,null,null);
if (outstandingSet.getRowCount() > 0)
{
long rowCount = ((Long)outstandingSet.getRow(0).getValue("doccount")).longValue();
if (rowCount > maxCount)
{
set3Hash.put(jobID,new Long(maxCount));
set3Exact.put(jobID,new Boolean(false));
}
else
{
set3Hash.put(jobID,new Long(rowCount));
set3Exact.put(jobID,new Boolean(true));
}
}
sb = new StringBuilder("SELECT ");
list.clear();
sb.append(database.constructCountClause(JobQueue.docHashField)).append(" AS doccount")
.append(" FROM ").append(jobQueue.getTableName()).append(" WHERE ");
sb.append(database.buildConjunctionClause(list,new ClauseDescription[]{new UnitaryClause(JobQueue.jobIDField,jobID)}));
sb.append(" AND ");
sb.append(database.buildConjunctionClause(list,new ClauseDescription[]{buildProcessedClause()}));
sb.append(" ").append(database.constructOffsetLimitClause(0,maxCount+1,false));
IResultSet processedSet = database.performQuery(sb.toString(),list,null,null);
if (processedSet.getRowCount() > 0)
{
long rowCount = ((Long)processedSet.getRow(0).getValue("doccount")).longValue();
if (rowCount > maxCount)
{
set4Hash.put(jobID,new Long(maxCount));
set4Exact.put(jobID,new Boolean(false));
}
else
{
set4Hash.put(jobID,new Long(rowCount));
set4Exact.put(jobID,new Boolean(true));
}
}
}
}
protected void buildCountsUsingGroupBy(String whereClause, ArrayList whereParams,
Map<Long,Long> set2Hash, Map<Long,Long> set3Hash, Map<Long,Long> set4Hash,
Map<Long,Boolean> set2Exact, Map<Long,Boolean> set3Exact, Map<Long,Boolean> set4Exact)
throws ManifoldCFException
{
StringBuilder sb = new StringBuilder("SELECT ");
ArrayList list = new ArrayList();
sb.append(JobQueue.jobIDField).append(",")
.append(database.constructCountClause(JobQueue.docHashField)).append(" AS doccount")
.append(" FROM ").append(jobQueue.getTableName()).append(" t1");
addWhereClause(sb,list,whereClause,whereParams,false);
sb.append(" GROUP BY ").append(JobQueue.jobIDField);
IResultSet set2 = database.performQuery(sb.toString(),list,null,null);
sb = new StringBuilder("SELECT ");
list.clear();
sb.append(JobQueue.jobIDField).append(",")
.append(database.constructCountClause(JobQueue.docHashField)).append(" AS doccount")
.append(" FROM ").append(jobQueue.getTableName()).append(" t1 WHERE ")
.append(database.buildConjunctionClause(list,new ClauseDescription[]{buildOutstandingClause()}));
addWhereClause(sb,list,whereClause,whereParams,true);
sb.append(" GROUP BY ").append(JobQueue.jobIDField);
IResultSet set3 = database.performQuery(sb.toString(),list,null,null);
sb = new StringBuilder("SELECT ");
list.clear();
sb.append(JobQueue.jobIDField).append(",")
.append(database.constructCountClause(JobQueue.docHashField)).append(" AS doccount")
.append(" FROM ").append(jobQueue.getTableName()).append(" t1 WHERE ")
.append(database.buildConjunctionClause(list,new ClauseDescription[]{buildProcessedClause()}));
addWhereClause(sb,list,whereClause,whereParams,true);
sb.append(" GROUP BY ").append(JobQueue.jobIDField);
IResultSet set4 = database.performQuery(sb.toString(),list,null,null);
for (int j = 0; j < set2.getRowCount(); j++)
{
IResultRow row = set2.getRow(j);
Long jobID = (Long)row.getValue(JobQueue.jobIDField);
set2Hash.put(jobID,(Long)row.getValue("doccount"));
set2Exact.put(jobID,new Boolean(true));
}
for (int j = 0; j < set3.getRowCount(); j++)
{
IResultRow row = set3.getRow(j);
Long jobID = (Long)row.getValue(JobQueue.jobIDField);
set3Hash.put(jobID,(Long)row.getValue("doccount"));
set3Exact.put(jobID,new Boolean(true));
}
for (int j = 0; j < set4.getRowCount(); j++)
{
IResultRow row = set4.getRow(j);
Long jobID = (Long)row.getValue(JobQueue.jobIDField);
set4Hash.put(jobID,(Long)row.getValue("doccount"));
set4Exact.put(jobID,new Boolean(true));
}
}
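/** Restrict a jobqueue query (with the jobqueue table aliased as t1) to rows belonging to the jobs
* selected by the given where clause. Emits roughly:
* [WHERE|AND] EXISTS(SELECT 'x' FROM jobs t0 WHERE t0.id=t1.jobid AND &lt;whereClause&gt;)
*/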
protected void addWhereClause(StringBuilder sb, ArrayList list, String whereClause, ArrayList whereParams, boolean wherePresent)
{
if (whereClause != null)
{
if (wherePresent)
sb.append(" AND");
else
sb.append(" WHERE");
sb.append(" EXISTS(SELECT 'x' FROM ").append(jobs.getTableName()).append(" t0 WHERE ")
.append(database.buildConjunctionClause(list,new ClauseDescription[]{
new JoinClause("t0."+Jobs.idField,"t1."+JobQueue.jobIDField)})).append(" AND ")
.append(whereClause)
.append(")");
if (whereParams != null)
list.addAll(whereParams);
}
}
// These methods generate reports for direct display in the UI.
/** Run a 'document status' report.
*@param connectionName is the name of the connection.
*@param filterCriteria are the criteria used to limit the records considered for the report.
*@param sortOrder is the specified sort order of the final report.
*@param startRow is the first row to include.
*@param rowCount is the number of rows to include.
*@return the results, with the following columns: id, identifier, job, state, status, scheduled, action, retrycount, retrylimit. The "scheduled" column and the
* "retrylimit" column are long values representing a time in milliseconds; all other values will be user-friendly strings.
*/
public IResultSet genDocumentStatus(String connectionName, StatusFilterCriteria filterCriteria, SortOrder sortOrder,
int startRow, int rowCount)
throws ManifoldCFException
{
// Build the query.
Long currentTime = new Long(System.currentTimeMillis());
StringBuilder sb = new StringBuilder("SELECT ");
ArrayList list = new ArrayList();
sb.append("t0.").append(jobQueue.idField).append(" AS id,")
.append("t0.").append(jobQueue.docIDField).append(" AS identifier,")
.append("t1.").append(jobs.descriptionField).append(" AS job,")
.append("CASE")
.append(" WHEN ").append("t0.").append(jobQueue.statusField).append("=? THEN 'Not yet processed'")
.append(" WHEN ").append("t0.").append(jobQueue.statusField).append("=? THEN 'Not yet processed'")
.append(" WHEN ").append("t0.").append(jobQueue.statusField).append("=? THEN 'Not yet processed'")
.append(" WHEN ").append("t0.").append(jobQueue.statusField).append("=? THEN 'Processed'")
.append(" WHEN ").append("t0.").append(jobQueue.statusField).append("=? THEN 'Processed'")
.append(" WHEN ").append("t0.").append(jobQueue.statusField).append("=? THEN 'Processed'")
.append(" WHEN ").append("t0.").append(jobQueue.statusField).append("=? THEN 'Processed'")
.append(" WHEN ").append("t0.").append(jobQueue.statusField).append("=? THEN 'Processed'")
.append(" WHEN ").append("t0.").append(jobQueue.statusField).append("=? THEN 'Processed'")
.append(" WHEN ").append("t0.").append(jobQueue.statusField).append("=? THEN 'Being removed'")
.append(" WHEN ").append("t0.").append(jobQueue.statusField).append("=? THEN 'Being removed'")
.append(" WHEN ").append("t0.").append(jobQueue.statusField).append("=? THEN 'Being removed'")
.append(" WHEN ").append("t0.").append(jobQueue.statusField).append("=? THEN 'Out of scope'")
.append(" ELSE 'Unknown'")
.append(" END AS state,")
.append("CASE")
.append(" WHEN ")
.append("t0.").append(jobQueue.statusField).append(" IN (?,?,?)")
.append(" THEN 'Inactive'")
.append(" WHEN ")
.append("t0.").append(jobQueue.statusField).append(" IN (?,?)")
.append(" AND ").append("t0.").append(jobQueue.checkActionField).append("=?")
.append(" AND t0.").append(jobQueue.checkTimeField).append("<=").append(currentTime.toString())
.append(" THEN 'Ready for processing'")
.append(" WHEN ")
.append("t0.").append(jobQueue.statusField).append(" IN (?,?)")
.append(" AND ").append("t0.").append(jobQueue.checkActionField).append("=?")
.append(" AND t0.").append(jobQueue.checkTimeField).append("<=").append(currentTime.toString())
.append(" THEN 'Ready for expiration'")
.append(" WHEN ")
.append("t0.").append(jobQueue.statusField).append(" IN (?,?)")
.append(" AND ").append("t0.").append(jobQueue.checkActionField).append("=?")
.append(" AND t0.").append(jobQueue.checkTimeField).append(">").append(currentTime.toString())
.append(" THEN 'Waiting for processing'")
.append(" WHEN ")
.append("t0.").append(jobQueue.statusField).append(" IN (?,?)")
.append(" AND ").append("t0.").append(jobQueue.checkActionField).append("=?")
.append(" AND t0.").append(jobQueue.checkTimeField).append(">").append(currentTime.toString())
.append(" THEN 'Waiting for expiration'")
.append(" WHEN ")
.append("t0.").append(jobQueue.statusField).append(" IN (?,?)")
.append(" AND ").append("t0.").append(jobQueue.checkTimeField).append(" IS NULL")
.append(" THEN 'Waiting forever'")
.append(" WHEN ")
.append("t0.").append(jobQueue.statusField).append("=?")
.append(" THEN 'Hopcount exceeded'")
.append(" WHEN ").append("t0.").append(jobQueue.statusField).append(" IN (?,?,?)")
.append(" THEN 'Deleting'")
.append(" WHEN ")
.append("t0.").append(jobQueue.statusField).append(" IN (?,?,?,?)")
.append(" AND ").append("t0.").append(jobQueue.checkActionField).append("=?")
.append(" THEN 'Processing'")
.append(" WHEN ")
.append("t0.").append(jobQueue.statusField).append(" IN (?,?,?,?)")
.append(" AND ").append("t0.").append(jobQueue.checkActionField).append("=?")
.append(" THEN 'Expiring'")
.append(" ELSE 'Unknown'")
.append(" END AS status,")
.append("t0.").append(jobQueue.checkTimeField).append(" AS scheduled,")
.append("CASE")
.append(" WHEN ").append("t0.").append(jobQueue.checkActionField).append("=? THEN 'Process'")
.append(" WHEN ").append("t0.").append(jobQueue.checkActionField).append("=? THEN 'Expire'")
.append(" ELSE 'Unknown'")
.append(" END AS action,")
.append("t0.").append(jobQueue.failCountField).append(" AS retrycount,")
.append("t0.").append(jobQueue.failTimeField).append(" AS retrylimit")
.append(" FROM ").append(jobQueue.getTableName()).append(" t0,").append(jobs.getTableName()).append(" t1 WHERE ")
.append(database.buildConjunctionClause(list,new ClauseDescription[]{
new JoinClause("t0."+jobQueue.jobIDField,"t1."+jobs.idField)}));
list.add(jobQueue.statusToString(jobQueue.STATUS_PENDING));
list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVE));
list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVENEEDRESCAN));
list.add(jobQueue.statusToString(jobQueue.STATUS_PENDINGPURGATORY));
list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVEPURGATORY));
list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVENEEDRESCANPURGATORY));
list.add(jobQueue.statusToString(jobQueue.STATUS_COMPLETE));
list.add(jobQueue.statusToString(jobQueue.STATUS_UNCHANGED));
list.add(jobQueue.statusToString(jobQueue.STATUS_PURGATORY));
list.add(jobQueue.statusToString(jobQueue.STATUS_BEINGDELETED));
list.add(jobQueue.statusToString(jobQueue.STATUS_BEINGCLEANED));
list.add(jobQueue.statusToString(jobQueue.STATUS_ELIGIBLEFORDELETE));
list.add(jobQueue.statusToString(jobQueue.STATUS_HOPCOUNTREMOVED));
list.add(jobQueue.statusToString(jobQueue.STATUS_COMPLETE));
list.add(jobQueue.statusToString(jobQueue.STATUS_UNCHANGED));
list.add(jobQueue.statusToString(jobQueue.STATUS_PURGATORY));
list.add(jobQueue.statusToString(jobQueue.STATUS_PENDING));
list.add(jobQueue.statusToString(jobQueue.STATUS_PENDINGPURGATORY));
list.add(jobQueue.actionToString(jobQueue.ACTION_RESCAN));
list.add(jobQueue.statusToString(jobQueue.STATUS_PENDING));
list.add(jobQueue.statusToString(jobQueue.STATUS_PENDINGPURGATORY));
list.add(jobQueue.actionToString(jobQueue.ACTION_REMOVE));
list.add(jobQueue.statusToString(jobQueue.STATUS_PENDING));
list.add(jobQueue.statusToString(jobQueue.STATUS_PENDINGPURGATORY));
list.add(jobQueue.actionToString(jobQueue.ACTION_RESCAN));
list.add(jobQueue.statusToString(jobQueue.STATUS_PENDING));
list.add(jobQueue.statusToString(jobQueue.STATUS_PENDINGPURGATORY));
list.add(jobQueue.actionToString(jobQueue.ACTION_REMOVE));
list.add(jobQueue.statusToString(jobQueue.STATUS_PENDING));
list.add(jobQueue.statusToString(jobQueue.STATUS_PENDINGPURGATORY));
list.add(jobQueue.statusToString(jobQueue.STATUS_HOPCOUNTREMOVED));
list.add(jobQueue.statusToString(jobQueue.STATUS_BEINGDELETED));
list.add(jobQueue.statusToString(jobQueue.STATUS_BEINGCLEANED));
list.add(jobQueue.statusToString(jobQueue.STATUS_ELIGIBLEFORDELETE));
list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVE));
list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVENEEDRESCAN));
list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVEPURGATORY));
list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVENEEDRESCANPURGATORY));
list.add(jobQueue.actionToString(jobQueue.ACTION_RESCAN));
list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVE));
list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVENEEDRESCAN));
list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVEPURGATORY));
list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVENEEDRESCANPURGATORY));
list.add(jobQueue.actionToString(jobQueue.ACTION_REMOVE));
list.add(jobQueue.actionToString(jobQueue.ACTION_RESCAN));
list.add(jobQueue.actionToString(jobQueue.ACTION_REMOVE));
addCriteria(sb,list,"t0.",connectionName,filterCriteria,true);
// The intrinsic ordering is provided by the "id" column, and nothing else.
addOrdering(sb,new String[]{"id"},sortOrder);
addLimits(sb,startRow,rowCount);
return database.performQuery(sb.toString(),list,null,null,rowCount,null);
}
/** Run a 'queue status' report.
*@param connectionName is the name of the connection.
*@param filterCriteria are the criteria used to limit the records considered for the report.
*@param sortOrder is the specified sort order of the final report.
*@param idBucketDescription is the bucket description for generating the identifier class.
*@param startRow is the first row to include.
*@param rowCount is the number of rows to include.
*@return the results, with the following columns: idbucket, inactive, processing, expiring, deleting,
* processready, expireready, processwaiting, expirewaiting, waitingforever, hopcountexceeded
*/
public IResultSet genQueueStatus(String connectionName, StatusFilterCriteria filterCriteria, SortOrder sortOrder,
BucketDescription idBucketDescription, int startRow, int rowCount)
throws ManifoldCFException
{
// The generated query has roughly this shape:
// SELECT substring(docid FROM '<id_regexp>') AS idbucket,
// SUM(CASE WHEN status='C' THEN 1 ELSE 0 END) AS inactive, ... FROM jobqueue WHERE <criteria>
// GROUP BY idbucket
Long currentTime = new Long(System.currentTimeMillis());
StringBuilder sb = new StringBuilder("SELECT ");
ArrayList list = new ArrayList();
sb.append("t1.idbucket,SUM(t1.inactive) AS inactive,SUM(t1.processing) AS processing,SUM(t1.expiring) AS expiring,SUM(t1.deleting) AS deleting,")
.append("SUM(t1.processready) AS processready,SUM(t1.expireready) AS expireready,SUM(t1.processwaiting) AS processwaiting,SUM(t1.expirewaiting) AS expirewaiting,")
.append("SUM(t1.waitingforever) AS waitingforever,SUM(t1.hopcountexceeded) AS hopcountexceeded FROM (SELECT ");
addBucketExtract(sb,list,"",jobQueue.docIDField,idBucketDescription);
sb.append(" AS idbucket,")
.append("CASE")
.append(" WHEN ")
.append(jobQueue.statusField).append(" IN (?,?,?)")
.append(" THEN 1 ELSE 0")
.append(" END")
.append(" AS inactive,")
.append("CASE")
.append(" WHEN ")
.append(jobQueue.statusField).append(" IN (?,?,?,?)")
.append(" AND ").append(jobQueue.checkActionField).append("=?")
.append(" THEN 1 ELSE 0")
.append(" END")
.append(" as processing,")
.append("CASE")
.append(" WHEN ")
.append(jobQueue.statusField).append(" IN (?,?,?,?)")
.append(" AND ").append(jobQueue.checkActionField).append("=?")
.append(" THEN 1 ELSE 0")
.append(" END")
.append(" as expiring,")
.append("CASE")
.append(" WHEN ")
.append(jobQueue.statusField).append(" IN (?,?,?)")
.append(" THEN 1 ELSE 0")
.append(" END")
.append(" as deleting,")
.append("CASE")
.append(" WHEN ")
.append(jobQueue.statusField).append(" IN (?,?)")
.append(" AND ").append(jobQueue.checkActionField).append("=?")
.append(" AND ").append(jobQueue.checkTimeField).append("<=").append(currentTime.toString())
.append(" THEN 1 ELSE 0")
.append(" END")
.append(" as processready,")
.append("CASE")
.append(" WHEN ")
.append(jobQueue.statusField).append(" IN (?,?)")
.append(" AND ").append(jobQueue.checkActionField).append("=?")
.append(" AND ").append(jobQueue.checkTimeField).append("<=").append(currentTime.toString())
.append(" THEN 1 ELSE 0")
.append(" END")
.append(" as expireready,")
.append("CASE")
.append(" WHEN ")
.append(jobQueue.statusField).append(" IN (?,?)")
.append(" AND ").append(jobQueue.checkActionField).append("=?")
.append(" AND ").append(jobQueue.checkTimeField).append(">").append(currentTime.toString())
.append(" THEN 1 ELSE 0")
.append(" END")
.append(" as processwaiting,")
.append("CASE")
.append(" WHEN ")
.append(jobQueue.statusField).append(" IN (?,?)")
.append(" AND ").append(jobQueue.checkActionField).append("=?")
.append(" AND ").append(jobQueue.checkTimeField).append(">").append(currentTime.toString())
.append(" THEN 1 ELSE 0")
.append(" END")
.append(" as expirewaiting,")
.append("CASE")
.append(" WHEN ")
.append(jobQueue.statusField).append(" IN (?,?)")
.append(" AND ").append(jobQueue.checkTimeField).append(" IS NULL")
.append(" THEN 1 ELSE 0")
.append(" END")
.append(" as waitingforever,")
.append("CASE")
.append(" WHEN ")
.append(jobQueue.statusField).append("=?")
.append(" THEN 1 ELSE 0")
.append(" END")
.append(" as hopcountexceeded");
sb.append(" FROM ").append(jobQueue.getTableName());
list.add(jobQueue.statusToString(jobQueue.STATUS_COMPLETE));
list.add(jobQueue.statusToString(jobQueue.STATUS_UNCHANGED));
list.add(jobQueue.statusToString(jobQueue.STATUS_PURGATORY));
list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVE));
list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVENEEDRESCAN));
list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVEPURGATORY));
list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVENEEDRESCANPURGATORY));
list.add(jobQueue.actionToString(jobQueue.ACTION_RESCAN));
list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVE));
list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVENEEDRESCAN));
list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVEPURGATORY));
list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVENEEDRESCANPURGATORY));
list.add(jobQueue.actionToString(jobQueue.ACTION_REMOVE));
list.add(jobQueue.statusToString(jobQueue.STATUS_BEINGDELETED));
list.add(jobQueue.statusToString(jobQueue.STATUS_BEINGCLEANED));
list.add(jobQueue.statusToString(jobQueue.STATUS_ELIGIBLEFORDELETE));
list.add(jobQueue.statusToString(jobQueue.STATUS_PENDING));
list.add(jobQueue.statusToString(jobQueue.STATUS_PENDINGPURGATORY));
list.add(jobQueue.actionToString(jobQueue.ACTION_RESCAN));
list.add(jobQueue.statusToString(jobQueue.STATUS_PENDING));
list.add(jobQueue.statusToString(jobQueue.STATUS_PENDINGPURGATORY));
list.add(jobQueue.actionToString(jobQueue.ACTION_REMOVE));
list.add(jobQueue.statusToString(jobQueue.STATUS_PENDING));
list.add(jobQueue.statusToString(jobQueue.STATUS_PENDINGPURGATORY));
list.add(jobQueue.actionToString(jobQueue.ACTION_RESCAN));
list.add(jobQueue.statusToString(jobQueue.STATUS_PENDING));
list.add(jobQueue.statusToString(jobQueue.STATUS_PENDINGPURGATORY));
list.add(jobQueue.actionToString(jobQueue.ACTION_REMOVE));
list.add(jobQueue.statusToString(jobQueue.STATUS_PENDING));
list.add(jobQueue.statusToString(jobQueue.STATUS_PENDINGPURGATORY));
list.add(jobQueue.statusToString(jobQueue.STATUS_HOPCOUNTREMOVED));
addCriteria(sb,list,"",connectionName,filterCriteria,false);
sb.append(") t1 GROUP BY idbucket");
addOrdering(sb,new String[]{"idbucket","inactive","processing","expiring","deleting","processready","expireready","processwaiting","expirewaiting","waitingforever","hopcountexceeded"},sortOrder);
addLimits(sb,startRow,rowCount);
return database.performQuery(sb.toString(),list,null,null,rowCount,null);
}
// Protected methods for report generation
/** Turn a bucket description into a return column.
* This is complicated by the fact that the extraction code is inherently case sensitive. So if case-insensitive
* matching is desired, we whack the whole thing to lower case before doing the match.
*/
protected void addBucketExtract(StringBuilder sb, ArrayList list, String columnPrefix, String columnName, BucketDescription bucketDesc)
{
boolean isSensitive = bucketDesc.isSensitive();
list.add(bucketDesc.getRegexp());
sb.append(database.constructSubstringClause(columnPrefix+columnName,"?",!isSensitive));
}
/** Add criteria clauses to query.
*/
protected boolean addCriteria(StringBuilder sb, ArrayList list, String fieldPrefix, String connectionName, StatusFilterCriteria criteria, boolean whereEmitted)
throws ManifoldCFException
{
Long[] matchingJobs = criteria.getJobs();
if (matchingJobs != null)
{
whereEmitted = emitClauseStart(sb,whereEmitted);
if (matchingJobs.length == 0)
{
sb.append("0>1");
}
else
{
sb.append(database.buildConjunctionClause(list,new ClauseDescription[]{
new MultiClause(fieldPrefix+jobQueue.jobIDField,matchingJobs)}));
}
}
RegExpCriteria identifierRegexp = criteria.getIdentifierMatch();
if (identifierRegexp != null)
{
whereEmitted = emitClauseStart(sb,whereEmitted);
list.add(identifierRegexp.getRegexpString());
sb.append(database.constructRegexpClause(fieldPrefix+jobQueue.docIDField,"?",identifierRegexp.isInsensitive()));
}
Long nowTime = new Long(criteria.getNowTime());
int[] states = criteria.getMatchingStates();
int[] statuses = criteria.getMatchingStatuses();
if (states.length == 0 || statuses.length == 0)
{
whereEmitted = emitClauseStart(sb,whereEmitted);
sb.append("0>1");
return whereEmitted;
}
// Iterate through the specified states, and emit a series of OR clauses, one for each state. The contents of the clause will be complex.
whereEmitted = emitClauseStart(sb,whereEmitted);
sb.append("(");
int k = 0;
while (k < states.length)
{
int stateValue = states[k];
if (k > 0)
sb.append(" OR ");
switch (stateValue)
{
case DOCSTATE_NEVERPROCESSED:
sb.append(database.buildConjunctionClause(list,new ClauseDescription[]{
new MultiClause(fieldPrefix+jobQueue.statusField,new Object[]{
jobQueue.statusToString(jobQueue.STATUS_PENDING),
jobQueue.statusToString(jobQueue.STATUS_ACTIVE),
jobQueue.statusToString(jobQueue.STATUS_ACTIVENEEDRESCAN)})}));
break;
case DOCSTATE_PREVIOUSLYPROCESSED:
sb.append(database.buildConjunctionClause(list,new ClauseDescription[]{
new MultiClause(fieldPrefix+jobQueue.statusField,new Object[]{
jobQueue.statusToString(jobQueue.STATUS_PENDINGPURGATORY),
jobQueue.statusToString(jobQueue.STATUS_ACTIVEPURGATORY),
jobQueue.statusToString(jobQueue.STATUS_ACTIVENEEDRESCANPURGATORY),
jobQueue.statusToString(jobQueue.STATUS_ELIGIBLEFORDELETE),
jobQueue.statusToString(jobQueue.STATUS_BEINGDELETED),
jobQueue.statusToString(jobQueue.STATUS_BEINGCLEANED),
jobQueue.statusToString(jobQueue.STATUS_COMPLETE),
jobQueue.statusToString(jobQueue.STATUS_UNCHANGED),
jobQueue.statusToString(jobQueue.STATUS_PURGATORY)})}));
break;
case DOCSTATE_OUTOFSCOPE:
sb.append(database.buildConjunctionClause(list,new ClauseDescription[]{
new MultiClause(fieldPrefix+jobQueue.statusField,new Object[]{
jobQueue.statusToString(jobQueue.STATUS_HOPCOUNTREMOVED)})}));
break;
}
k++;
}
sb.append(")");
whereEmitted = emitClauseStart(sb,whereEmitted);
sb.append("(");
k = 0;
while (k < statuses.length)
{
int stateValue = statuses[k];
if (k > 0)
sb.append(" OR ");
switch (stateValue)
{
case DOCSTATUS_INACTIVE:
sb.append(database.buildConjunctionClause(list,new ClauseDescription[]{
new MultiClause(fieldPrefix+jobQueue.statusField,new Object[]{
jobQueue.statusToString(jobQueue.STATUS_COMPLETE),
jobQueue.statusToString(jobQueue.STATUS_UNCHANGED),
jobQueue.statusToString(jobQueue.STATUS_PURGATORY)})}));
break;
case DOCSTATUS_PROCESSING:
sb.append(database.buildConjunctionClause(list,new ClauseDescription[]{
new MultiClause(fieldPrefix+jobQueue.statusField,new Object[]{
jobQueue.statusToString(jobQueue.STATUS_ACTIVE),
jobQueue.statusToString(jobQueue.STATUS_ACTIVENEEDRESCAN),
jobQueue.statusToString(jobQueue.STATUS_ACTIVEPURGATORY),
jobQueue.statusToString(jobQueue.STATUS_ACTIVENEEDRESCANPURGATORY)}),
new UnitaryClause(fieldPrefix+jobQueue.checkActionField,jobQueue.actionToString(jobQueue.ACTION_RESCAN))}));
break;
case DOCSTATUS_EXPIRING:
sb.append(database.buildConjunctionClause(list,new ClauseDescription[]{
new MultiClause(fieldPrefix+jobQueue.statusField,new Object[]{
jobQueue.statusToString(jobQueue.STATUS_ACTIVE),
jobQueue.statusToString(jobQueue.STATUS_ACTIVENEEDRESCAN),
jobQueue.statusToString(jobQueue.STATUS_ACTIVEPURGATORY),
jobQueue.statusToString(jobQueue.STATUS_ACTIVENEEDRESCANPURGATORY)}),
new UnitaryClause(fieldPrefix+jobQueue.checkActionField,jobQueue.actionToString(jobQueue.ACTION_REMOVE))}));
break;
case DOCSTATUS_DELETING:
sb.append(database.buildConjunctionClause(list,new ClauseDescription[]{
new MultiClause(fieldPrefix+jobQueue.statusField,new Object[]{
jobQueue.statusToString(jobQueue.STATUS_BEINGDELETED),
jobQueue.statusToString(jobQueue.STATUS_BEINGCLEANED),
jobQueue.statusToString(jobQueue.STATUS_ELIGIBLEFORDELETE)})}));
break;
case DOCSTATUS_READYFORPROCESSING:
sb.append(database.buildConjunctionClause(list,new ClauseDescription[]{
new MultiClause(fieldPrefix+jobQueue.statusField,new Object[]{
jobQueue.statusToString(jobQueue.STATUS_PENDING),
jobQueue.statusToString(jobQueue.STATUS_PENDINGPURGATORY)}),
new UnitaryClause(fieldPrefix+jobQueue.checkActionField,jobQueue.actionToString(jobQueue.ACTION_RESCAN)),
new UnitaryClause(fieldPrefix+jobQueue.checkTimeField,"<=",nowTime)}));
break;
case DOCSTATUS_READYFOREXPIRATION:
sb.append(database.buildConjunctionClause(list,new ClauseDescription[]{
new MultiClause(fieldPrefix+jobQueue.statusField,new Object[]{
jobQueue.statusToString(jobQueue.STATUS_PENDING),
jobQueue.statusToString(jobQueue.STATUS_PENDINGPURGATORY)}),
new UnitaryClause(fieldPrefix+jobQueue.checkActionField,jobQueue.actionToString(jobQueue.ACTION_REMOVE)),
new UnitaryClause(fieldPrefix+jobQueue.checkTimeField,"<=",nowTime)}));
break;
case DOCSTATUS_WAITINGFORPROCESSING:
sb.append(database.buildConjunctionClause(list,new ClauseDescription[]{
new MultiClause(fieldPrefix+jobQueue.statusField,new Object[]{
jobQueue.statusToString(jobQueue.STATUS_PENDING),
jobQueue.statusToString(jobQueue.STATUS_PENDINGPURGATORY)}),
new UnitaryClause(fieldPrefix+jobQueue.checkActionField,jobQueue.actionToString(jobQueue.ACTION_RESCAN)),
new UnitaryClause(fieldPrefix+jobQueue.checkTimeField,">",nowTime)}));
break;
case DOCSTATUS_WAITINGFOREXPIRATION:
sb.append(database.buildConjunctionClause(list,new ClauseDescription[]{
new MultiClause(fieldPrefix+jobQueue.statusField,new Object[]{
jobQueue.statusToString(jobQueue.STATUS_PENDING),
jobQueue.statusToString(jobQueue.STATUS_PENDINGPURGATORY)}),
new UnitaryClause(fieldPrefix+jobQueue.checkActionField,jobQueue.actionToString(jobQueue.ACTION_REMOVE)),
new UnitaryClause(fieldPrefix+jobQueue.checkTimeField,">",nowTime)}));
break;
case DOCSTATUS_WAITINGFOREVER:
sb.append(database.buildConjunctionClause(list,new ClauseDescription[]{
new MultiClause(fieldPrefix+jobQueue.statusField,new Object[]{
jobQueue.statusToString(jobQueue.STATUS_PENDING),
jobQueue.statusToString(jobQueue.STATUS_PENDINGPURGATORY)})}))
.append(" AND ").append(fieldPrefix).append(jobQueue.checkTimeField).append(" IS NULL");
break;
case DOCSTATUS_HOPCOUNTEXCEEDED:
sb.append(database.buildConjunctionClause(list,new ClauseDescription[]{
new MultiClause(fieldPrefix+jobQueue.statusField,new Object[]{
jobQueue.statusToString(jobQueue.STATUS_HOPCOUNTREMOVED)})}));
break;
}
k++;
}
sb.append(")");
return whereEmitted;
}
/** Emit a WHERE or an AND, depending...
*/
protected boolean emitClauseStart(StringBuilder sb, boolean whereEmitted)
{
if (whereEmitted)
sb.append(" AND ");
else
sb.append(" WHERE ");
return true;
}
/** Add ordering.
*/
protected void addOrdering(StringBuilder sb, String[] completeFieldList, SortOrder sort)
{
// Keep track of the fields we've seen
Map hash = new HashMap();
// Emit the "Order by"
sb.append(" ORDER BY ");
// Go through the specified list
int i = 0;
int count = sort.getCount();
while (i < count)
{
if (i > 0)
sb.append(",");
String column = sort.getColumn(i);
sb.append(column);
if (sort.getDirection(i) == sort.SORT_ASCENDING)
sb.append(" ASC");
else
sb.append(" DESC");
hash.put(column,column);
i++;
}
// Now, go through the complete field list, and emit sort criteria for everything
// not actually specified. This is so LIMIT and OFFSET give consistent results.
int j = 0;
while (j < completeFieldList.length)
{
String field = completeFieldList[j];
if (hash.get(field) == null)
{
if (i > 0)
sb.append(",");
sb.append(field);
sb.append(" DESC");
//if (j == 0)
// sb.append(" DESC");
//else
// sb.append(" ASC");
i++;
}
j++;
}
}
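// Worked example for addOrdering() (exposition only; names are hypothetical): with a
// user-specified sort of [status ASC] and completeFieldList == {status, id, docid},
// the emitted clause is " ORDER BY status ASC,id DESC,docid DESC".  Appending the
// unspecified fields gives every row a unique sort position, so LIMIT/OFFSET paging
// stays consistent even when many rows share the same status value.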
/** Add limit and offset.
*/
protected void addLimits(StringBuilder sb, int startRow, int maxRowCount)
{
sb.append(" ").append(database.constructOffsetLimitClause(startRow,maxRowCount));
}
/** Class for tracking existing jobqueue row data */
protected static class JobqueueRecord
{
protected Long recordID;
protected int status;
protected Long checkTimeValue;
public JobqueueRecord(Long recordID, int status, Long checkTimeValue)
{
this.recordID = recordID;
this.status = status;
this.checkTimeValue = checkTimeValue;
}
public Long getRecordID()
{
return recordID;
}
public int getStatus()
{
return status;
}
public Long getCheckTimeValue()
{
return checkTimeValue;
}
}
/** We go through 2x the number of documents we should need if we were perfect at setting document priorities. */
private final static int EXTRA_FACTOR = 2;
/** This class provides the throttling limits for the job queueing query.
*/
protected static class ThrottleLimit implements ILimitChecker
{
// For each connection, there is (a) a number (which is the maximum per bin), and (b)
// a current running count per bin. These are stored as elements in a hash map.
protected HashMap connectionMap = new HashMap();
// The maximum number of jobs that have reached their chunk size limit that we
// need
protected int n;
// This is the hash table that maps a job ID to the object that tracks the number
// of documents already accumulated for this resultset. The count of the number
// of queue records we have is tallied by going through each job in this table
// and adding the records outstanding for it.
protected HashMap jobQueueHash = new HashMap();
// This is the map from jobid to connection name
protected HashMap jobConnection = new HashMap();
// This is the set of allowed connection names. We discard all documents that are
// not from that set.
protected HashMap activeConnections = new HashMap();
// This is the number of documents per set per connection.
protected HashMap setSizes = new HashMap();
// These are the individual connection maximums, keyed by connection name.
protected HashMap maxConnectionCounts = new HashMap();
// This is the maximum number of documents per set over all the connections we are looking at. This helps us establish a sanity limit.
protected int maxSetSize = 0;
// This is the number of documents processed so far
protected int documentsProcessed = 0;
// This is where we accumulate blocking documents. This is an arraylist of DocumentDescription objects.
protected ArrayList blockingDocumentArray = new ArrayList();
// Cutoff time for documents eligible for prioritization
protected long prioritizationTime;
/** Constructor.
* This class is built up piecemeal, so the constructor initializes only what is known up front.
*@param n is the maximum number of full job descriptions we want at this time.
*@param prioritizationTime is the cutoff time for documents eligible for prioritization.
*/
public ThrottleLimit(int n, long prioritizationTime)
{
this.n = n;
this.prioritizationTime = prioritizationTime;
Logging.perf.debug("Limit instance created");
}
/** Transfer blocking documents discovered to BlockingDocuments object */
public void tallyBlockingDocuments(BlockingDocuments blockingDocuments)
{
int i = 0;
while (i < blockingDocumentArray.size())
{
DocumentDescription dd = (DocumentDescription)blockingDocumentArray.get(i++);
blockingDocuments.addBlockingDocument(dd);
}
blockingDocumentArray.clear();
}
/** Add a job/connection name map entry.
*@param jobID is the job id.
*@param connectionName is the connection name.
*/
public void addJob(Long jobID, String connectionName)
{
jobConnection.put(jobID,connectionName);
}
/** Add an active connection. This is the pool of active connections that will be used for the lifetime of this operation.
*@param connectionName is the connection name.
*@param connectorInstance is the connector instance for that connection.
*/
public void addConnectionName(String connectionName, IRepositoryConnector connectorInstance)
throws ManifoldCFException
{
activeConnections.put(connectionName,connectorInstance);
int setSize = connectorInstance.getMaxDocumentRequest();
setSizes.put(connectionName,new Integer(setSize));
if (setSize > maxSetSize)
maxSetSize = setSize;
}
/** Add a document limit for a specified connection. This is the limit across all matching bins; if any
* individual matching bin exceeds that limit, then documents that belong to that bin will be excluded.
*@param connectionName is the connection name.
*@param regexp is the regular expression, which we will match against various bins.
*@param upperLimit is the maximum fetch count allowed for bins matching that expression.
*/
public void addLimit(String connectionName, String regexp, int upperLimit)
{
if (Logging.perf.isDebugEnabled())
Logging.perf.debug(" Adding fetch limit of "+Integer.toString(upperLimit)+" fetches for expression '"+regexp+"' for connection '"+connectionName+"'");
ThrottleJobItem ji = (ThrottleJobItem)connectionMap.get(connectionName);
if (ji == null)
{
ji = new ThrottleJobItem();
connectionMap.put(connectionName,ji);
}
ji.addLimit(regexp,upperLimit);
}
/** Set a connection-based total document limit.
*/
public void setConnectionLimit(String connectionName, int maxDocuments)
{
if (Logging.perf.isDebugEnabled())
Logging.perf.debug(" Setting connection limit of "+Integer.toString(maxDocuments)+" for connection "+connectionName);
maxConnectionCounts.put(connectionName,new MutableInteger(maxDocuments));
}
/** See if this class can be legitimately compared against another of
* the same type.
*@return true if comparisons will ever return "true".
*/
public boolean doesCompareWork()
{
return false;
}
/** Create a duplicate of this class instance. All current state should be preserved.
* NOTE: Since doesCompareWork() returns false, queries using this limit checker cannot
* be cached, and therefore duplicate() is never called from the query executor. But it can
* be called from other places.
*@return the duplicate.
*/
public ILimitChecker duplicate()
{
return makeDeepCopy();
}
/** Make a deep copy */
public ThrottleLimit makeDeepCopy()
{
ThrottleLimit rval = new ThrottleLimit(n,prioritizationTime);
// Create a true copy of all the structures in which counts are kept. The referential structures (e.g. connection hashes)
// do not need a deep copy.
rval.activeConnections = activeConnections;
rval.setSizes = setSizes;
rval.maxConnectionCounts = maxConnectionCounts;
rval.maxSetSize = maxSetSize;
rval.jobConnection = jobConnection;
// The structures where counts are maintained DO need a deep copy.
rval.documentsProcessed = documentsProcessed;
Iterator iter;
iter = connectionMap.keySet().iterator();
while (iter.hasNext())
{
Object key = iter.next();
rval.connectionMap.put(key,((ThrottleJobItem)connectionMap.get(key)).duplicate());
}
iter = jobQueueHash.keySet().iterator();
while (iter.hasNext())
{
Object key = iter.next();
rval.jobQueueHash.put(key,((QueueHashItem)jobQueueHash.get(key)).duplicate());
}
return rval;
}
/** Find the hashcode for this class. This will only ever be used if
* doesCompareWork() returns true.
*@return the hashcode.
*/
public int hashCode()
{
return 0;
}
/** Compare two objects and see if equal. This will only ever be used
* if doesCompareWork() returns true.
*@param object is the object to compare against.
*@return true if equal.
*/
public boolean equals(Object object)
{
return false;
}
/** Get the remaining documents we should query for.
*@return the maximal remaining count.
*/
public int getRemainingDocuments()
{
return EXTRA_FACTOR * n * maxSetSize - documentsProcessed;
}
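// Worked example (exposition only): with n == 10 chunks wanted, maxSetSize == 10
// documents per chunk, and EXTRA_FACTOR == 2, the checker examines at most
// 2 * 10 * 10 == 200 rows before concluding that document priorities are too far
// off to find enough includable documents this pass.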
/** See if a result row should be included in the final result set.
*@param row is the result row to check.
*@return true if it should be included, false otherwise.
*/
public boolean checkInclude(IResultRow row)
throws ManifoldCFException
{
// Note: This method does two things: First, it ensures that the number of documents per job per bin does
// not exceed the calculated throttle number. Second, it keeps track of how many document queue items
// will be needed, so we can stop when we've got enough for the moment.
Logging.perf.debug("Checking if row should be included");
// This is where the per-row work gets done.
// The row passed in has the following jobqueue columns: idField, jobIDField, docIDField, and statusField
Long jobIDValue = (Long)row.getValue(JobQueue.jobIDField);
// Get the connection name for this row
String connectionName = (String)jobConnection.get(jobIDValue);
if (connectionName == null)
{
Logging.perf.debug(" Row does not have an eligible job - excluding");
return false;
}
IRepositoryConnector connectorInstance = (IRepositoryConnector)activeConnections.get(connectionName);
if (connectorInstance == null)
{
Logging.perf.debug(" Row does not have an eligible connector instance - excluding");
return false;
}
// Find the connection limit for this document
MutableInteger connectionLimit = (MutableInteger)maxConnectionCounts.get(connectionName);
if (connectionLimit != null)
{
if (connectionLimit.intValue() == 0)
{
Logging.perf.debug(" Row exceeds its connection limit - excluding");
return false;
}
connectionLimit.decrement();
}
// Tally this item in the job queue hash, so we can detect when to stop
QueueHashItem queueItem = (QueueHashItem)jobQueueHash.get(jobIDValue);
if (queueItem == null)
{
// Need to talk to the connector to get a max number of docs per chunk
int maxCount = ((Integer)setSizes.get(connectionName)).intValue();
queueItem = new QueueHashItem(maxCount);
jobQueueHash.put(jobIDValue,queueItem);
}
String docIDHash = (String)row.getValue(JobQueue.docHashField);
String docID = (String)row.getValue(JobQueue.docIDField);
// Figure out what the right bins are, given the data we have.
// This will involve a call to the connector.
String[] binNames = ManifoldCF.calculateBins(connectorInstance,docID);
// Keep the running count, so we can abort without going through the whole set.
documentsProcessed++;
//scanRecord.addBins(binNames);
ThrottleJobItem item = (ThrottleJobItem)connectionMap.get(connectionName);
// If there is no schedule-based throttling on this connection, we're done.
if (item == null)
{
queueItem.addDocument();
Logging.perf.debug(" Row has no throttling - including");
return true;
}
int j = 0;
while (j < binNames.length)
{
if (item.isEmpty(binNames[j]))
{
if (Logging.perf.isDebugEnabled())
Logging.perf.debug(" Bin "+binNames[j]+" has no more available fetches - excluding");
Object o = row.getValue(JobQueue.prioritySetField);
if (o == null || ((Long)o).longValue() <= prioritizationTime)
{
// Need to add a document descriptor based on this row to the blockingDocuments object!
// This will cause it to be reprioritized preferentially, getting it out of the way if it shouldn't
// be there.
Long id = (Long)row.getValue(JobQueue.idField);
Long jobID = (Long)row.getValue(JobQueue.jobIDField);
DocumentDescription dd = new DocumentDescription(id,jobID,docIDHash,docID);
blockingDocumentArray.add(dd);
}
return false;
}
j++;
}
j = 0;
while (j < binNames.length)
{
item.decrement(binNames[j++]);
}
queueItem.addDocument();
Logging.perf.debug(" Including!");
return true;
}
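// Summary of the decision above (exposition only): a row is included only if (1) its job
// and connector instance are still live, (2) the per-connection document budget is not
// exhausted, and (3) every bin the document maps to still has fetches remaining; otherwise
// it is excluded, and possibly recorded as a blocking document for reprioritization.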
/** See if we should examine another row.
*@return true if we need to keep going, or false if we are done.
*/
public boolean checkContinue()
throws ManifoldCFException
{
if (documentsProcessed >= EXTRA_FACTOR * n * maxSetSize)
return false;
// If the number of chunks exceeds n, we are done
Iterator iter = jobQueueHash.keySet().iterator();
int count = 0;
while (iter.hasNext())
{
Long jobID = (Long)iter.next();
QueueHashItem item = (QueueHashItem)jobQueueHash.get(jobID);
count += item.getChunkCount();
if (count > n)
return false;
}
return true;
}
}
/** This class contains information per job on how many queue items have so far been accumulated.
*/
protected static class QueueHashItem
{
// The number of items per chunk for this job
int itemsPerChunk;
// The number of chunks so far, INCLUDING incomplete chunks
int chunkCount = 0;
// The number of documents in the current incomplete chunk
int currentDocumentCount = 0;
/** Construct.
*@param itemsPerChunk is the number of items per chunk for this job.
*/
public QueueHashItem(int itemsPerChunk)
{
this.itemsPerChunk = itemsPerChunk;
}
/** Duplicate. */
public QueueHashItem duplicate()
{
QueueHashItem rval = new QueueHashItem(itemsPerChunk);
rval.chunkCount = chunkCount;
rval.currentDocumentCount = currentDocumentCount;
return rval;
}
/** Add a document to this job.
*/
public void addDocument()
{
currentDocumentCount++;
if (currentDocumentCount == 1)
chunkCount++;
if (currentDocumentCount == itemsPerChunk)
currentDocumentCount = 0;
}
/** Get the number of chunks.
*@return the number of chunks.
*/
public int getChunkCount()
{
return chunkCount;
}
}
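/** Worked example (added for exposition; not part of the original code): demonstrates
* how QueueHashItem counts both complete and partial chunks.
*/
protected static int exampleChunkCounting()
{
  QueueHashItem q = new QueueHashItem(3);  // 3 documents per chunk
  q.addDocument();   // starts chunk 1; getChunkCount() == 1
  q.addDocument();   // still chunk 1
  q.addDocument();   // chunk 1 full; internal document counter resets
  q.addDocument();   // starts chunk 2; getChunkCount() == 2
  return q.getChunkCount();  // returns 2
}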
/** This class represents the information stored PER JOB in the throttling structure.
* In this structure, "remaining" counts are kept for each bin. When the bin becomes empty,
* then no more documents that would map to that bin will be returned, for this query.
*
* The way in which the maximum count per bin is determined is not part of this class.
*/
protected static class ThrottleJobItem
{
/** These are the bin limits. This is an array of ThrottleLimitSpec objects. */
protected ArrayList throttleLimits = new ArrayList();
/** This is a map of the bins and their current remaining counts. If an entry doesn't exist, the
* count is taken to be that bin's computed maximum (see findMaxCount()). */
protected HashMap binCounts = new HashMap();
/** Constructor. */
public ThrottleJobItem()
{
}
/** Add a bin limit.
*@param regexp is the regular expression describing the bins to which the limit applies.
*@param maxCount is the maximum number of fetches allowed for that bin.
*/
public void addLimit(String regexp, int maxCount)
{
try
{
throttleLimits.add(new ThrottleLimitSpec(regexp,maxCount));
}
catch (PatternSyntaxException e)
{
// Ignore the bad entry; it just won't contribute any throttling.
}
}
/** Create a duplicate of this item.
*@return the duplicate.
*/
public ThrottleJobItem duplicate()
{
ThrottleJobItem rval = new ThrottleJobItem();
rval.throttleLimits = throttleLimits;
Iterator iter = binCounts.keySet().iterator();
while (iter.hasNext())
{
String key = (String)iter.next();
// Deep-copy the mutable bin counts into the duplicate, not back into this object.
rval.binCounts.put(key,((MutableInteger)binCounts.get(key)).duplicate());
}
return rval;
}
/** Check if the specified bin is empty.
*@param binName is the bin name.
*@return true if empty.
*/
public boolean isEmpty(String binName)
{
MutableInteger value = (MutableInteger)binCounts.get(binName);
int remaining;
if (value == null)
{
int x = findMaxCount(binName);
if (x == -1)
return false;
remaining = x;
}
else
remaining = value.intValue();
return (remaining == 0);
}
/** Decrement specified bin.
*@param binName is the bin name.
*/
public void decrement(String binName)
{
MutableInteger value = (MutableInteger)binCounts.get(binName);
if (value == null)
{
int x = findMaxCount(binName);
if (x == -1)
return;
value = new MutableInteger(x);
binCounts.put(binName,value);
}
value.decrement();
}
/** Given a bin name, find the max value for it using the regexps that are in place.
*@param binName is the bin name.
*@return the max count for that bin, or -1 if infinite.
*/
protected int findMaxCount(String binName)
{
// Each connector generates a set of bins per descriptor, e.g. "", ".com", ".metacarta.com", "foo.metacarta.com"
//
// We want to be able to do a couple of different kinds of things easily. For example, we want to:
// - be able to "turn off" or restrict fetching for a given domain, to a lower value than for other domains
// - be able to control fetch rates of .com, .metacarta.com, and foo.metacarta.com such that we
// can establish a faster rate for .com than for foo.metacarta.com
//
// The standard case is to limit fetch rate for all terminal domains (e.g. foo.metacarta.com) to some number:
// ^[^\.] = 8
//
// To apply an additional limit restriction on a specific domain easily requires that the MINIMUM rate
// value be chosen when more than one regexp match is found:
// ^[^\.] = 8
// ^foo\.metacarta\.com = 4
//
// To apply different rates for different levels:
// ^[^\.] = 8
// ^\.[^\.]*\.[^\.]*$ = 20
// ^\.[^\.]*$ = 40
//
// If the same bin is matched by more than one regexp, I now take the MINIMUM value, since this seems to be
// more what the world wants to do (restrict, rather than increase, fetch rates).
int maxCount = -1;
int i = 0;
while (i < throttleLimits.size())
{
ThrottleLimitSpec spec = (ThrottleLimitSpec)throttleLimits.get(i++);
Pattern p = spec.getRegexp();
Matcher m = p.matcher(binName);
if (m.find())
{
int limit = spec.getMaxCount();
if (maxCount == -1 || limit < maxCount)
maxCount = limit;
}
}
return maxCount;
}
}
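/** Illustrative sketch (added for exposition; not part of the original code): shows the
* MINIMUM-match rule of ThrottleJobItem.findMaxCount().  The domains are hypothetical.
*/
protected static int exampleMinimumLimit()
{
  ThrottleJobItem item = new ThrottleJobItem();
  item.addLimit("^[^\\.]",8);                     // terminal domains: 8 fetches each
  item.addLimit("^foo\\.metacarta\\.com",4);      // tighter limit for one specific domain
  // "foo.metacarta.com" matches both regexps, so the smaller limit wins:
  return item.findMaxCount("foo.metacarta.com");  // returns 4
}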
/** This is a class which describes an individual throttle limit, in fetches. */
protected static class ThrottleLimitSpec
{
/** Regexp */
protected Pattern regexp;
/** The fetch limit for all bins matching that regexp */
protected int maxCount;
/** Constructor */
public ThrottleLimitSpec(String regexp, int maxCount)
throws PatternSyntaxException
{
this.regexp = Pattern.compile(regexp);
this.maxCount = maxCount;
}
/** Get the regexp. */
public Pattern getRegexp()
{
return regexp;
}
/** Get the max count */
public int getMaxCount()
{
return maxCount;
}
}
/** Mutable integer class.
*/
protected static class MutableInteger
{
int value;
/** Construct.
*/
public MutableInteger(int value)
{
this.value = value;
}
/** Duplicate */
public MutableInteger duplicate()
{
return new MutableInteger(value);
}
/** Decrement.
*/
public void decrement()
{
value--;
}
/** Increment.
*/
public void increment()
{
value++;
}
/** Get value.
*/
public int intValue()
{
return value;
}
}
}
/** Get list of cleanable document descriptions. This list will take into account
* multiple jobs that may own the same document. All documents for which a description
* is returned will be transitioned to the "being cleaned" state.
*@param maxCount is the maximum number of documents to return.
*@param currentTime is the current time; documents whose check time is still in the future are not returned.
*@return the document descriptions, plus flags indicating whether each document should also be removed from the index.
*/
public DocumentSetAndFlags getNextCleanableDocuments(int maxCount, long currentTime)
throws ManifoldCFException
{
// The query will be built here, because it joins the jobs table against the jobqueue
// table.
//
// This query must only pick up documents that are not active in any job and
// which belong to a job that's in a "shutting down" state and are in
// a "purgatory" state.
//
// We are in fact more conservative in this query than we need to be; the documents
// excluded will include some that simply match our criteria, which is designed to
// be fast rather than perfect. The match we make is: hashvalue against hashvalue, and
// different job id's.
//
// SELECT id,jobid,docid FROM jobqueue t0 WHERE t0.status='P' AND EXISTS(SELECT 'x' FROM
// jobs t1 WHERE t0.jobid=t1.id AND t1.status='X')
// AND NOT EXISTS(SELECT 'x' FROM jobqueue t2 WHERE t0.hashval=t2.hashval AND t0.jobid!=t2.jobid
// AND t2.status IN ('A','F','B'))
//
// Do a simple preliminary query, since the big query is currently slow, so that we don't waste time during stasis or
// ingestion.
// Moved outside of transaction, so we have no chance of locking up job status cache key for an extended period of time.
if (!jobs.cleaningJobsPresent())
return new DocumentSetAndFlags(new DocumentDescription[0],new boolean[0]);
long startTime = 0L;
if (Logging.perf.isDebugEnabled())
{
startTime = System.currentTimeMillis();
Logging.perf.debug("Waiting to find documents to put on the cleaning queue");
}
while (true)
{
long sleepAmt = 0L;
// Enter a write lock. This means we don't need a FOR UPDATE on the query.
lockManager.enterWriteLock(cleanStufferLock);
try
{
database.beginTransaction();
try
{
if (Logging.perf.isDebugEnabled())
Logging.perf.debug("After "+new Long(System.currentTimeMillis()-startTime).toString()+" ms, beginning query to look for documents to put on cleaning queue");
// Note: This query does not do "FOR UPDATE", because it is running under the only thread that can possibly change the document's state to "being cleaned".
ArrayList list = new ArrayList();
StringBuilder sb = new StringBuilder("SELECT ");
sb.append(jobQueue.idField).append(",")
.append(jobQueue.jobIDField).append(",")
.append(jobQueue.docHashField).append(",")
.append(jobQueue.docIDField).append(",")
.append(jobQueue.failTimeField).append(",")
.append(jobQueue.failCountField)
.append(" FROM ").append(jobQueue.getTableName()).append(" t0 WHERE ")
.append(database.buildConjunctionClause(list,new ClauseDescription[]{
new UnitaryClause("t0."+jobQueue.statusField,jobQueue.statusToString(jobQueue.STATUS_PURGATORY))})).append(" AND ")
.append("(t0.").append(jobQueue.checkTimeField).append(" IS NULL OR t0.").append(jobQueue.checkTimeField).append("<=?) AND ");
list.add(new Long(currentTime));
sb.append("EXISTS(SELECT 'x' FROM ").append(jobs.getTableName()).append(" t1 WHERE ")
.append(database.buildConjunctionClause(list,new ClauseDescription[]{
new UnitaryClause("t1."+jobs.statusField,jobs.statusToString(jobs.STATUS_SHUTTINGDOWN)),
new JoinClause("t1."+jobs.idField,"t0."+jobQueue.jobIDField)}))
.append(") AND ");
sb.append("NOT EXISTS(SELECT 'x' FROM ").append(jobQueue.getTableName()).append(" t2 WHERE ")
.append(database.buildConjunctionClause(list,new ClauseDescription[]{
new JoinClause("t2."+jobQueue.docHashField,"t0."+jobQueue.docHashField)})).append(" AND ")
.append("t2.").append(jobQueue.statusField).append(" IN (?,?,?,?,?,?) AND ")
.append("t2.").append(jobQueue.jobIDField).append("!=t0.").append(jobQueue.jobIDField)
.append(") ");
list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVE));
list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVEPURGATORY));
list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVENEEDRESCAN));
list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVENEEDRESCANPURGATORY));
list.add(jobQueue.statusToString(jobQueue.STATUS_BEINGDELETED));
list.add(jobQueue.statusToString(jobQueue.STATUS_BEINGCLEANED));
sb.append(database.constructOffsetLimitClause(0,maxCount));
// The "checktime is null" check is for backwards compatibility
IResultSet set = database.performQuery(sb.toString(),list,null,null,maxCount,null);
if (Logging.perf.isDebugEnabled())
Logging.perf.debug("Done getting docs to cleaning queue after "+new Long(System.currentTimeMillis()-startTime).toString()+" ms.");
// We need to organize the returned set by connection name and output connection name, so that we can efficiently
// use getUnindexableDocumentIdentifiers.
// This is a table keyed by connection name and containing an ArrayList, which in turn contains DocumentDescription
// objects.
HashMap connectionNameMap = new HashMap();
HashMap documentIDMap = new HashMap();
int i = 0;
while (i < set.getRowCount())
{
IResultRow row = set.getRow(i);
Long jobID = (Long)row.getValue(jobQueue.jobIDField);
String documentIDHash = (String)row.getValue(jobQueue.docHashField);
String documentID = (String)row.getValue(jobQueue.docIDField);
Long failTimeValue = (Long)row.getValue(jobQueue.failTimeField);
Long failCountValue = (Long)row.getValue(jobQueue.failCountField);
// Failtime is probably not useful in this context, but we'll bring it along for completeness
long failTime;
if (failTimeValue == null)
failTime = -1L;
else
failTime = failTimeValue.longValue();
int failCount;
if (failCountValue == null)
failCount = 0;
else
failCount = (int)failCountValue.longValue();
IJobDescription jobDesc = load(jobID);
String connectionName = jobDesc.getConnectionName();
String outputConnectionName = jobDesc.getOutputConnectionName();
DocumentDescription dd = new DocumentDescription((Long)row.getValue(jobQueue.idField),
jobID,documentIDHash,documentID,failTime,failCount);
String compositeDocumentID = makeCompositeID(documentIDHash,connectionName);
documentIDMap.put(compositeDocumentID,dd);
Map y = (Map)connectionNameMap.get(connectionName);
if (y == null)
{
y = new HashMap();
connectionNameMap.put(connectionName,y);
}
ArrayList x = (ArrayList)y.get(outputConnectionName);
if (x == null)
{
// New entry needed
x = new ArrayList();
y.put(outputConnectionName,x);
}
x.add(dd);
i++;
}
// For each bin, obtain a filtered answer, and enter all answers into a hash table.
// We'll then scan the result again to look up the right descriptions for return,
// and delete the ones that are owned multiply.
HashMap allowedDocIds = new HashMap();
Iterator iter = connectionNameMap.keySet().iterator();
while (iter.hasNext())
{
String connectionName = (String)iter.next();
Map y = (Map)connectionNameMap.get(connectionName);
Iterator outputIter = y.keySet().iterator();
while (outputIter.hasNext())
{
String outputConnectionName = (String)outputIter.next();
ArrayList x = (ArrayList)y.get(outputConnectionName);
// Do the filter query
DocumentDescription[] descriptions = new DocumentDescription[x.size()];
int j = 0;
while (j < descriptions.length)
{
descriptions[j] = (DocumentDescription)x.get(j);
j++;
}
String[] docIDHashes = getUnindexableDocumentIdentifiers(descriptions,connectionName,outputConnectionName);
j = 0;
while (j < docIDHashes.length)
{
String docIDHash = docIDHashes[j++];
String key = makeCompositeID(docIDHash,connectionName);
allowedDocIds.put(key,docIDHash);
}
}
}
// Now, assemble a result, and change the state of the records accordingly
// First thing to do is order by document hash, so we reduce the risk of deadlock.
String[] compositeIDArray = new String[documentIDMap.size()];
i = 0;
iter = documentIDMap.keySet().iterator();
while (iter.hasNext())
{
compositeIDArray[i++] = (String)iter.next();
}
java.util.Arrays.sort(compositeIDArray);
DocumentDescription[] rval = new DocumentDescription[documentIDMap.size()];
boolean[] rvalBoolean = new boolean[documentIDMap.size()];
i = 0;
while (i < compositeIDArray.length)
{
String compositeDocID = compositeIDArray[i];
DocumentDescription dd = (DocumentDescription)documentIDMap.get(compositeDocID);
// Determine whether we can delete it from the index or not
rvalBoolean[i] = (allowedDocIds.get(compositeDocID) != null);
// Set the record status to "being cleaned" and return it
rval[i++] = dd;
jobQueue.setCleaningStatus(dd.getID());
}
TrackerClass.notePrecommit();
database.performCommit();
TrackerClass.noteCommit();
if (Logging.perf.isDebugEnabled())
Logging.perf.debug("Done pruning unindexable docs after "+new Long(System.currentTimeMillis()-startTime).toString()+" ms.");
return new DocumentSetAndFlags(rval,rvalBoolean);
}
catch (Error e)
{
database.signalRollback();
TrackerClass.noteRollback();
throw e;
}
catch (ManifoldCFException e)
{
database.signalRollback();
TrackerClass.noteRollback();
if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT)
{
if (Logging.perf.isDebugEnabled())
Logging.perf.debug("Aborted transaction finding deleteable docs: "+e.getMessage());
sleepAmt = getRandomAmount();
continue;
}
throw e;
}
finally
{
database.endTransaction();
}
}
finally
{
lockManager.leaveWriteLock(cleanStufferLock);
sleepFor(sleepAmt);
}
}
}
/** Create a composite document hash key. This consists of the document id hash plus the
* connection name.
*/
protected static String makeCompositeID(String docIDHash, String connectionName)
{
return docIDHash + ":" + connectionName;
}
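// Worked example (exposition only; values are hypothetical): makeCompositeID("a1b2","wiki")
// yields "a1b2:wiki".  Because the hash comes first, sorting composite IDs orders records
// primarily by document hash, which is what the deadlock-avoidance sorts above rely on.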
/** Get list of deletable document descriptions. This list will take into account
* multiple jobs that may own the same document. All documents for which a description
* is returned will be transitioned to the "beingdeleted" state. Documents which are
* not in transition and are eligible, but are owned by other jobs, will have their
* jobqueue entries deleted by this method.
*@param maxCount is the maximum number of documents to return.
*@param currentTime is the current time; some fetches do not occur until a specific time.
*@return the document descriptions for these documents.
*/
public DocumentDescription[] getNextDeletableDocuments(int maxCount, long currentTime)
throws ManifoldCFException
{
// The query will be built here, because it joins the jobs table against the jobqueue
// table.
//
// This query must only pick up documents that are not active in any job and
// which either belong to a job that's in a "delete pending" state and are in
// a "complete", "purgatory", or "pendingpurgatory" state, OR belong to a job
// that's in a "shutting down" state and are in the "purgatory" state.
//
// We are in fact more conservative in this query than we need to be; the documents
// excluded will include some that simply match our criteria, which is designed to
// be fast rather than perfect. The match we make is: hashvalue against hashvalue, and
// different job id's.
//
// SELECT id,jobid,docid FROM jobqueue t0 WHERE t0.status IN ('C','P','G') AND EXISTS(SELECT 'x' FROM
// jobs t1 WHERE t0.jobid=t1.id AND t1.status='D')
// AND NOT EXISTS(SELECT 'x' FROM jobqueue t2 WHERE t0.hashval=t2.hashval AND t0.jobid!=t2.jobid
// AND t2.status IN ('A','F','B'))
//
// Do a simple preliminary query, since the big query is currently slow, so that we don't waste time during stasis or
// ingestion.
// Moved outside of transaction, so we have no chance of locking up job status cache key for an extended period of time.
if (!jobs.deletingJobsPresent())
return new DocumentDescription[0];
long startTime = 0L;
if (Logging.perf.isDebugEnabled())
{
startTime = System.currentTimeMillis();
Logging.perf.debug("Waiting to find documents to put on the delete queue");
}
while (true)
{
long sleepAmt = 0L;
// Enter a write lock so that multiple threads can't be in here at the same time
lockManager.enterWriteLock(deleteStufferLock);
try
{
database.beginTransaction();
try
{
if (Logging.perf.isDebugEnabled())
Logging.perf.debug("After "+new Long(System.currentTimeMillis()-startTime).toString()+" ms, beginning query to look for documents to put on delete queue");
// Note: This query does not do "FOR UPDATE", because it is running under the only thread that can possibly change the document's state to "being deleted".
// If FOR UPDATE was included, deadlock happened a lot.
ArrayList list = new ArrayList();
StringBuilder sb = new StringBuilder("SELECT ");
sb.append(jobQueue.idField).append(",")
.append(jobQueue.jobIDField).append(",")
.append(jobQueue.docHashField).append(",")
.append(jobQueue.docIDField).append(",")
.append(jobQueue.failTimeField).append(",")
.append(jobQueue.failCountField).append(" FROM ").append(jobQueue.getTableName()).append(" t0 WHERE ")
.append(database.buildConjunctionClause(list,new ClauseDescription[]{
new UnitaryClause("t0."+jobQueue.statusField,jobQueue.statusToString(jobQueue.STATUS_ELIGIBLEFORDELETE))})).append(" AND ")
.append("t0.").append(jobQueue.checkTimeField).append("<=? AND ");
list.add(new Long(currentTime));
sb.append("EXISTS(SELECT 'x' FROM ").append(jobs.getTableName()).append(" t1 WHERE ")
.append(database.buildConjunctionClause(list,new ClauseDescription[]{
new UnitaryClause("t1."+jobs.statusField,jobs.statusToString(jobs.STATUS_DELETING)),
new JoinClause("t1."+jobs.idField,"t0."+jobQueue.jobIDField)})).append(") AND ");
sb.append("NOT EXISTS(SELECT 'x' FROM ").append(jobQueue.getTableName()).append(" t2 WHERE ")
.append(database.buildConjunctionClause(list,new ClauseDescription[]{
new JoinClause("t2."+jobQueue.docHashField,"t0."+jobQueue.docHashField)})).append(" AND ")
.append("t2.").append(jobQueue.statusField).append(" IN (?,?,?,?,?,?) AND ")
.append("t2.").append(jobQueue.jobIDField).append("!=t0.").append(jobQueue.jobIDField)
.append(") ");
list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVE));
list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVEPURGATORY));
list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVENEEDRESCAN));
list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVENEEDRESCANPURGATORY));
list.add(jobQueue.statusToString(jobQueue.STATUS_BEINGDELETED));
list.add(jobQueue.statusToString(jobQueue.STATUS_BEINGCLEANED));
sb.append(database.constructOffsetLimitClause(0,maxCount));
// (Unlike the cleaning query, this query requires a non-null checktime, so no "is null" clause appears.)
IResultSet set = database.performQuery(sb.toString(),list,null,null,maxCount,null);
if (Logging.perf.isDebugEnabled())
Logging.perf.debug("Done getting docs to delete queue after "+new Long(System.currentTimeMillis()-startTime).toString()+" ms.");
// We need to organize the returned set by connection name, so that we can efficiently
// use getUnindexableDocumentIdentifiers.
// This is a table keyed by connection name and containing an ArrayList, which in turn contains DocumentDescription
// objects.
HashMap connectionNameMap = new HashMap();
HashMap documentIDMap = new HashMap();
int i = 0;
while (i < set.getRowCount())
{
IResultRow row = set.getRow(i);
Long jobID = (Long)row.getValue(jobQueue.jobIDField);
String documentIDHash = (String)row.getValue(jobQueue.docHashField);
String documentID = (String)row.getValue(jobQueue.docIDField);
Long failTimeValue = (Long)row.getValue(jobQueue.failTimeField);
Long failCountValue = (Long)row.getValue(jobQueue.failCountField);
// Failtime is probably not useful in this context, but we'll bring it along for completeness
long failTime;
if (failTimeValue == null)
failTime = -1L;
else
failTime = failTimeValue.longValue();
int failCount;
if (failCountValue == null)
failCount = 0;
else
failCount = (int)failCountValue.longValue();
IJobDescription jobDesc = load(jobID);
String connectionName = jobDesc.getConnectionName();
String outputConnectionName = jobDesc.getOutputConnectionName();
DocumentDescription dd = new DocumentDescription((Long)row.getValue(jobQueue.idField),
jobID,documentIDHash,documentID,failTime,failCount);
String compositeDocumentID = makeCompositeID(documentIDHash,connectionName);
documentIDMap.put(compositeDocumentID,dd);
Map y = (Map)connectionNameMap.get(connectionName);
if (y == null)
{
y = new HashMap();
connectionNameMap.put(connectionName,y);
}
ArrayList x = (ArrayList)y.get(outputConnectionName);
if (x == null)
{
// New entry needed
x = new ArrayList();
y.put(outputConnectionName,x);
}
x.add(dd);
i++;
}
// For each bin, obtain a filtered answer, and enter all answers into a hash table.
// We'll then scan the result again to look up the right descriptions for return,
// and delete the ones that are owned multiply.
HashMap allowedDocIds = new HashMap();
Iterator iter = connectionNameMap.keySet().iterator();
while (iter.hasNext())
{
String connectionName = (String)iter.next();
Map y = (Map)connectionNameMap.get(connectionName);
Iterator outputIter = y.keySet().iterator();
while (outputIter.hasNext())
{
String outputConnectionName = (String)outputIter.next();
ArrayList x = (ArrayList)y.get(outputConnectionName);
// Do the filter query
DocumentDescription[] descriptions = new DocumentDescription[x.size()];
int j = 0;
while (j < descriptions.length)
{
descriptions[j] = (DocumentDescription)x.get(j);
j++;
}
String[] docIDHashes = getUnindexableDocumentIdentifiers(descriptions,connectionName,outputConnectionName);
j = 0;
while (j < docIDHashes.length)
{
String docIDHash = docIDHashes[j++];
String key = makeCompositeID(docIDHash,connectionName);
allowedDocIds.put(key,docIDHash);
}
}
}
// Now, assemble a result, and change the state of the records accordingly
// First thing to do is order by document hash to reduce chances of deadlock.
String[] compositeIDArray = new String[documentIDMap.size()];
i = 0;
iter = documentIDMap.keySet().iterator();
while (iter.hasNext())
{
compositeIDArray[i++] = (String)iter.next();
}
java.util.Arrays.sort(compositeIDArray);
DocumentDescription[] rval = new DocumentDescription[allowedDocIds.size()];
int j = 0;
i = 0;
while (i < compositeIDArray.length)
{
String compositeDocumentID = compositeIDArray[i];
DocumentDescription dd = (DocumentDescription)documentIDMap.get(compositeDocumentID);
if (allowedDocIds.get(compositeDocumentID) == null)
{
// Delete this record and do NOT return it.
jobQueue.deleteRecord(dd.getID());
// What should we do about hopcount here?
// We are deleting a record which belongs to a job that is being
// cleaned up. The job itself will go away when this is done,
// and so will all the hopcount stuff pertaining to it. So, the
// treatment I've chosen here is to leave the hopcount alone and
// let the job cleanup get rid of it at the right time.
// Note: carrydown records handled in the same manner...
//carryDown.deleteRecords(dd.getJobID(),new String[]{dd.getDocumentIdentifier()});
}
else
{
// Set the record status to "being deleted" and return it
rval[j++] = dd;
jobQueue.setDeletingStatus(dd.getID());
}
i++;
}
TrackerClass.notePrecommit();
database.performCommit();
TrackerClass.noteCommit();
if (Logging.perf.isDebugEnabled())
Logging.perf.debug("Done pruning unindexable docs after "+new Long(System.currentTimeMillis()-startTime).toString()+" ms.");
return rval;
}
catch (Error e)
{
database.signalRollback();
TrackerClass.noteRollback();
throw e;
}
catch (ManifoldCFException e)
{
database.signalRollback();
TrackerClass.noteRollback();
if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT)
{
if (Logging.perf.isDebugEnabled())
Logging.perf.debug("Aborted transaction finding deleteable docs: "+e.getMessage());
sleepAmt = getRandomAmount();
continue;
}
throw e;
}
finally
{
database.endTransaction();
}
}
finally
{
lockManager.leaveWriteLock(deleteStufferLock);
sleepFor(sleepAmt);
}
}
}
/** Get a list of document identifiers that should actually be deleted from the index, from a list that
* might contain identifiers that are shared with other jobs, which are targeted to the same output connection.
* The input list is guaranteed to be smaller in size than maxInClauseCount for the database.
*@param documentIdentifiers is the set of document identifiers to consider.
*@param connectionName is the connection name for ALL the document identifiers.
*@param outputConnectionName is the output connection name for ALL the document identifiers.
*@return the set of documents which should be removed from the index.
*/
protected String[] getUnindexableDocumentIdentifiers(DocumentDescription[] documentIdentifiers, String connectionName, String outputConnectionName)
throws ManifoldCFException
{
// This is where we will count the individual document id's
HashMap countMap = new HashMap();
// First thing: Compute the set of document identifier hash values to query against
HashMap map = new HashMap();
int i = 0;
while (i < documentIdentifiers.length)
{
String hash = documentIdentifiers[i++].getDocumentIdentifierHash();
map.put(hash,hash);
countMap.put(hash,new MutableInteger(0));
}
if (map.size() == 0)
return new String[0];
// Build a query
StringBuilder sb = new StringBuilder();
ArrayList list = new ArrayList();
ArrayList docList = new ArrayList();
Iterator iter = map.keySet().iterator();
while (iter.hasNext())
{
docList.add(iter.next());
}
// Note: There is a potential race condition here. One job may be running while another is in process of
// being deleted. If they share a document, then the delete task could decide to delete the document and do so right
// after the ingestion takes place in the running job, but right before the document's status is updated
// in the job queue [which would have prevented the deletion].
// Unless a transaction is thrown around the time ingestion is taking place (which is a very bad idea)
// we are stuck with the possibility of this condition, which will essentially lead to a document being
// missing from the index.
// One way of dealing with this is to treat "active" documents as already ingested, for the purpose of
// reference counting. Then these documents will not be deleted. The risk then becomes that the "active"
// document entry will not be completed (say, because of a restart), and thus the corresponding document
// will never be removed from the index.
//
// Instead, the only solution is to not queue a document for any activity that is inconsistent with activities
// that may already be ongoing for that document. For this reason, I have introduced a "BEING_DELETED"
// and "BEING_CLEANED" state
// for a document. These states will allow the various queries that queue up activities to avoid documents that
// are currently being processed elsewhere.
sb.append("SELECT t0.").append(jobQueue.docHashField).append(" FROM ").append(jobQueue.getTableName()).append(" t0 WHERE ")
.append(database.buildConjunctionClause(list,new ClauseDescription[]{
new MultiClause("t0."+jobQueue.docHashField,docList)})).append(" AND ")
.append("t0.").append(jobQueue.statusField).append(" IN (?,?,?,?,?) AND ");
list.add(jobQueue.statusToString(jobQueue.STATUS_PURGATORY));
list.add(jobQueue.statusToString(jobQueue.STATUS_PENDINGPURGATORY));
list.add(jobQueue.statusToString(jobQueue.STATUS_COMPLETE));
list.add(jobQueue.statusToString(jobQueue.STATUS_UNCHANGED));
list.add(jobQueue.statusToString(jobQueue.STATUS_ELIGIBLEFORDELETE));
sb.append("EXISTS(SELECT 'x' FROM ").append(jobs.getTableName()).append(" t1 WHERE ")
.append(database.buildConjunctionClause(list,new ClauseDescription[]{
new JoinClause("t1."+jobs.idField,"t0."+jobQueue.jobIDField)})).append(" AND ")
.append("t1.").append(jobs.connectionNameField).append("=? AND ")
.append("t1.").append(jobs.outputNameField).append("=?)");
list.add(connectionName);
list.add(outputConnectionName);
// Do the query, and then count the number of times each document identifier occurs.
IResultSet results = database.performQuery(sb.toString(),list,null,null);
i = 0;
while (i < results.getRowCount())
{
IResultRow row = results.getRow(i++);
String docIDHash = (String)row.getValue(jobQueue.docHashField);
MutableInteger mi = (MutableInteger)countMap.get(docIDHash);
if (mi != null)
mi.increment();
}
// Go through and count only those that have a count of 1.
int count = 0;
iter = countMap.keySet().iterator();
while (iter.hasNext())
{
String docIDHash = (String)iter.next();
MutableInteger mi = (MutableInteger)countMap.get(docIDHash);
if (mi.intValue() == 1)
count++;
}
String[] rval = new String[count];
iter = countMap.keySet().iterator();
count = 0;
while (iter.hasNext())
{
String docIDHash = (String)iter.next();
MutableInteger mi = (MutableInteger)countMap.get(docIDHash);
if (mi.intValue() == 1)
rval[count++] = docIDHash;
}
return rval;
}
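// Worked example (exposition only): suppose documents D1 and D2 are both owned by the
// job being deleted, and D2 is additionally owned by a second, still-live job on the
// same connection and output.  The counting query above sees D1's hash once (count == 1)
// and D2's hash twice (count == 2), so only D1 is returned as unindexable; D2 must stay
// in the index on behalf of the surviving job.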
// These methods support the reprioritization thread.
/** Get a list of already-processed documents to reprioritize. Documents in all jobs will be
* returned by this method. Up to n document descriptions will be returned.
*@param currentTime is the current time stamp for this prioritization pass. Avoid
* picking up any documents that are labeled with this timestamp or after.
*@param n is the maximum number of document descriptions desired.
*@return the document descriptions.
*/
public DocumentDescription[] getNextAlreadyProcessedReprioritizationDocuments(long currentTime, int n)
throws ManifoldCFException
{
StringBuilder sb = new StringBuilder();
ArrayList list = new ArrayList();
// The desired query is:
// SELECT docid FROM jobqueue WHERE prioritysettime < (currentTime) LIMIT (n)
sb.append("SELECT ")
.append(jobQueue.idField).append(",")
.append(jobQueue.docHashField).append(",")
.append(jobQueue.docIDField).append(",")
.append(jobQueue.jobIDField)
.append(" FROM ").append(jobQueue.getTableName()).append(" WHERE ");
sb.append(database.buildConjunctionClause(list,new ClauseDescription[]{
new MultiClause(jobQueue.statusField,new Object[]{
jobQueue.statusToString(JobQueue.STATUS_COMPLETE),
jobQueue.statusToString(JobQueue.STATUS_UNCHANGED),
jobQueue.statusToString(JobQueue.STATUS_PURGATORY)}),
new UnitaryClause(jobQueue.prioritySetField,"<",new Long(currentTime))})).append(" ");
sb.append(database.constructOffsetLimitClause(0,n));
IResultSet set = database.performQuery(sb.toString(),list,null,null,n,null);
DocumentDescription[] rval = new DocumentDescription[set.getRowCount()];
int i = 0;
while (i < set.getRowCount())
{
IResultRow row = set.getRow(i);
rval[i] = new DocumentDescription((Long)row.getValue(jobQueue.idField),
(Long)row.getValue(jobQueue.jobIDField),
(String)row.getValue(jobQueue.docHashField),
(String)row.getValue(jobQueue.docIDField));
i++;
}
return rval;
}
/** Get a list of not-yet-processed documents to reprioritize. Documents in all jobs will be
* returned by this method. Up to n document descriptions will be returned.
*@param currentTime is the current time stamp for this prioritization pass. Avoid
* picking up any documents that are labeled with this timestamp or after.
*@param n is the maximum number of document descriptions desired.
*@return the document descriptions.
*/
public DocumentDescription[] getNextNotYetProcessedReprioritizationDocuments(long currentTime, int n)
throws ManifoldCFException
{
StringBuilder sb = new StringBuilder("SELECT ");
ArrayList list = new ArrayList();
// This query MUST return only documents that are in a pending state which belong to an active job!!!
sb.append(jobQueue.idField).append(",")
.append(jobQueue.docHashField).append(",")
.append(jobQueue.docIDField).append(",")
.append(jobQueue.jobIDField)
.append(" FROM ").append(jobQueue.getTableName()).append(" t0 WHERE ")
.append(database.buildConjunctionClause(list,new ClauseDescription[]{
new MultiClause(jobQueue.statusField,new Object[]{
JobQueue.statusToString(jobQueue.STATUS_HOPCOUNTREMOVED),
JobQueue.statusToString(jobQueue.STATUS_PENDING),
JobQueue.statusToString(jobQueue.STATUS_PENDINGPURGATORY)}),
new UnitaryClause(jobQueue.prioritySetField,"<",new Long(currentTime))})).append(" AND ")
.append(jobQueue.checkActionField).append("=?").append(" AND ");
list.add(jobQueue.actionToString(JobQueue.ACTION_RESCAN));
// Per CONNECTORS-290, we need to be leaving priorities blank for jobs that aren't using them,
// so this will be changed to not include jobs where the priorities have been bashed to null.
//
// I've included ALL states that might have non-null doc priorities. This includes states
// corresponding to uninstalled connectors, since there is no transition that cleans out the
// document priorities in these states. The time during which a connector is uninstalled is
// expected to be short, because typically this state is the result of an installation procedure
// rather than willful action on the part of a user.
sb.append("EXISTS(SELECT 'x' FROM ").append(jobs.getTableName()).append(" t1 WHERE ")
.append(database.buildConjunctionClause(list,new ClauseDescription[]{
new MultiClause("t1."+jobs.statusField,new Object[]{
Jobs.statusToString(Jobs.STATUS_STARTINGUP),
Jobs.statusToString(Jobs.STATUS_STARTINGUPMINIMAL),
Jobs.statusToString(Jobs.STATUS_ACTIVE),
Jobs.statusToString(Jobs.STATUS_ACTIVESEEDING),
Jobs.statusToString(Jobs.STATUS_ACTIVE_UNINSTALLED),
Jobs.statusToString(Jobs.STATUS_ACTIVESEEDING_UNINSTALLED),
Jobs.statusToString(Jobs.STATUS_ACTIVE_NOOUTPUT),
Jobs.statusToString(Jobs.STATUS_ACTIVESEEDING_NOOUTPUT),
Jobs.statusToString(Jobs.STATUS_ACTIVE_NEITHER),
Jobs.statusToString(Jobs.STATUS_ACTIVESEEDING_NEITHER)
}),
new JoinClause("t1."+jobs.idField,"t0."+jobQueue.jobIDField)}))
.append(") ");
sb.append(database.constructOffsetLimitClause(0,n));
// Analyze jobqueue tables unconditionally, since query planning has become much more sensitive in PostgreSQL 8.3 than it used to be.
//jobQueue.unconditionallyAnalyzeTables();
IResultSet set = database.performQuery(sb.toString(),list,null,null,n,null);
DocumentDescription[] rval = new DocumentDescription[set.getRowCount()];
int i = 0;
while (i < set.getRowCount())
{
IResultRow row = set.getRow(i);
rval[i] = new DocumentDescription((Long)row.getValue(jobQueue.idField),
(Long)row.getValue(jobQueue.jobIDField),
(String)row.getValue(jobQueue.docHashField),
(String)row.getValue(jobQueue.docIDField));
i++;
}
return rval;
}
/** Save a set of document priorities. In the case where a document was eligible to have its
* priority set, but it no longer is eligible, then the provided priority will not be written.
*@param currentTime is the time in milliseconds since epoch.
*@param documentDescriptions are the document descriptions.
*@param priorities are the desired priorities.
*/
public void writeDocumentPriorities(long currentTime, DocumentDescription[] documentDescriptions, double[] priorities)
throws ManifoldCFException
{
// Retry loop - in case we get a deadlock despite our best efforts
while (true)
{
// This should be ordered by document identifier hash in order to prevent potential deadlock conditions
HashMap indexMap = new HashMap();
String[] docIDHashes = new String[documentDescriptions.length];
int i = 0;
while (i < documentDescriptions.length)
{
String documentIDHash = documentDescriptions[i].getDocumentIdentifierHash() + ":"+documentDescriptions[i].getJobID();
docIDHashes[i] = documentIDHash;
indexMap.put(documentIDHash,new Integer(i));
i++;
}
java.util.Arrays.sort(docIDHashes);
long sleepAmt = 0L;
// Start the transaction now
database.beginTransaction();
try
{
// Need to order the writes by doc id.
i = 0;
while (i < docIDHashes.length)
{
String docIDHash = docIDHashes[i];
Integer x = (Integer)indexMap.remove(docIDHash);
if (x == null)
throw new ManifoldCFException("Assertion failure: duplicate document identifier jobid/hash detected!");
int index = x.intValue();
DocumentDescription dd = documentDescriptions[index];
double priority = priorities[index];
jobQueue.writeDocPriority(currentTime,dd.getID(),priority);
if (Logging.perf.isDebugEnabled())
Logging.perf.debug("Setting document priority for '"+dd.getDocumentIdentifier()+"' to "+new Double(priority).toString()+", set time "+new Long(currentTime).toString());
i++;
}
database.performCommit();
break;
}
catch (ManifoldCFException e)
{
database.signalRollback();
if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT)
{
if (Logging.perf.isDebugEnabled())
Logging.perf.debug("Aborted transaction writing doc priorities: "+e.getMessage());
sleepAmt = getRandomAmount();
continue;
}
throw e;
}
catch (Error e)
{
database.signalRollback();
throw e;
}
finally
{
database.endTransaction();
sleepFor(sleepAmt);
}
}
}
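// Exposition note: the "hash:jobid" composite key sorted above gives every jobqueue row a
// unique, globally consistent write order.  For example (hypothetical values), two threads
// updating overlapping sets both write "a1b2:101" before "a1b2:102" before "c3d4:101",
// so they can never acquire row locks in opposite orders and deadlock against each other.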
/** Get up to the next n documents to be expired.
* This method marks the documents whose descriptions have been returned as "being processed", or active.
* The same marking is used as is used for documents that have been queued for worker threads. The model
* is thus identical.
*
*@param n is the maximum number of records desired.
*@param currentTime is the current time.
*@return the array of document descriptions to expire.
*/
public DocumentSetAndFlags getExpiredDocuments(int n, long currentTime)
throws ManifoldCFException
{
// Screening query
// Moved outside of transaction, so there's less chance of keeping jobstatus cache key tied up
// for an extended period of time.
if (!jobs.activeJobsPresent())
return new DocumentSetAndFlags(new DocumentDescription[0], new boolean[0]);
long startTime = 0L;
if (Logging.perf.isDebugEnabled())
{
startTime = System.currentTimeMillis();
Logging.perf.debug("Beginning query to look for documents to expire");
}
// Put together a query with a limit of n
// Note well: This query does not do "FOR UPDATE". The reason is that only one thread can possibly change the document's state to active.
// If FOR UPDATE was included, deadlock conditions would be common because of the complexity of this query.
ArrayList list = new ArrayList();
StringBuilder sb = new StringBuilder("SELECT ");
sb.append("t0.").append(jobQueue.idField).append(",")
.append("t0.").append(jobQueue.jobIDField).append(",")
.append("t0.").append(jobQueue.docHashField).append(",")
.append("t0.").append(jobQueue.docIDField).append(",")
.append("t0.").append(jobQueue.statusField).append(",")
.append("t0.").append(jobQueue.failTimeField).append(",")
.append("t0.").append(jobQueue.failCountField)
.append(" FROM ").append(jobQueue.getTableName()).append(" t0 WHERE ")
.append(database.buildConjunctionClause(list,new ClauseDescription[]{
new MultiClause("t0."+jobQueue.statusField,new Object[]{
jobQueue.statusToString(JobQueue.STATUS_PENDING),
jobQueue.statusToString(JobQueue.STATUS_PENDINGPURGATORY)}),
new UnitaryClause("t0."+jobQueue.checkActionField,jobQueue.actionToString(JobQueue.ACTION_REMOVE)),
new UnitaryClause("t0."+jobQueue.checkTimeField,"<=",new Long(currentTime))})).append(" AND ");
sb.append("EXISTS(SELECT 'x' FROM ").append(jobs.getTableName()).append(" t1 WHERE ")
.append(database.buildConjunctionClause(list,new ClauseDescription[]{
new MultiClause("t1."+jobs.statusField,new Object[]{
jobs.statusToString(jobs.STATUS_ACTIVE),
jobs.statusToString(jobs.STATUS_ACTIVESEEDING)}),
new JoinClause("t1."+jobs.idField,"t0."+jobQueue.jobIDField)})).append(") AND ");
sb.append("NOT EXISTS(SELECT 'x' FROM ").append(jobQueue.getTableName()).append(" t2 WHERE ")
.append(database.buildConjunctionClause(list,new ClauseDescription[]{
new JoinClause("t2."+jobQueue.docHashField,"t0."+jobQueue.docHashField)})).append(" AND ")
.append("t2.").append(jobQueue.statusField).append(" IN (?,?,?,?,?,?)").append(" AND ")
.append("t2.").append(jobQueue.jobIDField).append("!=t0.").append(jobQueue.jobIDField).append(") ");
list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVE));
list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVEPURGATORY));
list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVENEEDRESCAN));
list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVENEEDRESCANPURGATORY));
list.add(jobQueue.statusToString(jobQueue.STATUS_BEINGDELETED));
list.add(jobQueue.statusToString(jobQueue.STATUS_BEINGCLEANED));
sb.append(database.constructOffsetLimitClause(0,n));
String query = sb.toString();
// Analyze jobqueue tables unconditionally, since query planning has become much more sensitive in PostgreSQL 8.3 than it used to be.
//jobQueue.unconditionallyAnalyzeTables();
ArrayList answers = new ArrayList();
int repeatCount = 0;
while (true)
{
long sleepAmt = 0L;
// Enter a write lock, so only one thread can be doing this. That makes FOR UPDATE unnecessary.
lockManager.enterWriteLock(expireStufferLock);
try
{
if (Logging.perf.isDebugEnabled())
{
repeatCount++;
Logging.perf.debug(" Attempt "+Integer.toString(repeatCount)+" to expire documents, after "+
new Long(System.currentTimeMillis() - startTime)+" ms");
}
database.beginTransaction();
try
{
IResultSet set = database.performQuery(query,list,null,null,n,null);
if (Logging.perf.isDebugEnabled())
Logging.perf.debug(" Expiring "+Integer.toString(set.getRowCount())+" documents");
// To avoid deadlock, we want to update the document id hashes in order. This means reading into a structure I can sort by docid hash,
// before updating any rows in jobqueue.
HashMap connectionNameMap = new HashMap();
HashMap documentIDMap = new HashMap();
Map statusMap = new HashMap();
int i = 0;
while (i < set.getRowCount())
{
IResultRow row = set.getRow(i);
Long jobID = (Long)row.getValue(jobQueue.jobIDField);
String documentIDHash = (String)row.getValue(jobQueue.docHashField);
String documentID = (String)row.getValue(jobQueue.docIDField);
int status = jobQueue.stringToStatus(row.getValue(jobQueue.statusField).toString());
Long failTimeValue = (Long)row.getValue(jobQueue.failTimeField);
Long failCountValue = (Long)row.getValue(jobQueue.failCountField);
// Failtime is probably not useful in this context, but we'll bring it along for completeness
long failTime;
if (failTimeValue == null)
failTime = -1L;
else
failTime = failTimeValue.longValue();
int failCount;
if (failCountValue == null)
failCount = 0;
else
failCount = (int)failCountValue.longValue();
IJobDescription jobDesc = load(jobID);
String connectionName = jobDesc.getConnectionName();
String outputConnectionName = jobDesc.getOutputConnectionName();
DocumentDescription dd = new DocumentDescription((Long)row.getValue(jobQueue.idField),
jobID,documentIDHash,documentID,failTime,failCount);
String compositeDocumentID = makeCompositeID(documentIDHash,connectionName);
documentIDMap.put(compositeDocumentID,dd);
statusMap.put(compositeDocumentID,new Integer(status));
Map y = (Map)connectionNameMap.get(connectionName);
if (y == null)
{
y = new HashMap();
connectionNameMap.put(connectionName,y);
}
ArrayList x = (ArrayList)y.get(outputConnectionName);
if (x == null)
{
// New entry needed
x = new ArrayList();
y.put(outputConnectionName,x);
}
x.add(dd);
i++;
}
// For each bin, obtain a filtered answer, and enter all answers into a hash table.
// We'll then scan the result again to look up the right descriptions for return,
// and delete the ones that are owned multiply.
HashMap allowedDocIds = new HashMap();
Iterator iter = connectionNameMap.keySet().iterator();
while (iter.hasNext())
{
String connectionName = (String)iter.next();
Map y = (Map)connectionNameMap.get(connectionName);
Iterator outputIter = y.keySet().iterator();
while (outputIter.hasNext())
{
String outputConnectionName = (String)outputIter.next();
ArrayList x = (ArrayList)y.get(outputConnectionName);
// Do the filter query
DocumentDescription[] descriptions = new DocumentDescription[x.size()];
int j = 0;
while (j < descriptions.length)
{
descriptions[j] = (DocumentDescription)x.get(j);
j++;
}
String[] docIDHashes = getUnindexableDocumentIdentifiers(descriptions,connectionName,outputConnectionName);
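// The returned hashes are the ones eligible for index removal; per the comment above, identifiers
// that are owned by more than one job are filtered out by getUnindexableDocumentIdentifiers().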
j = 0;
while (j < docIDHashes.length)
{
String docIDHash = docIDHashes[j++];
String key = makeCompositeID(docIDHash,connectionName);
allowedDocIds.put(key,docIDHash);
}
}
}
// Now, assemble a result, and change the state of the records accordingly
// First thing to do is order by document hash, so we reduce the risk of deadlock.
String[] compositeIDArray = new String[documentIDMap.size()];
i = 0;
iter = documentIDMap.keySet().iterator();
while (iter.hasNext())
{
compositeIDArray[i++] = (String)iter.next();
}
java.util.Arrays.sort(compositeIDArray);
DocumentDescription[] rval = new DocumentDescription[documentIDMap.size()];
boolean[] rvalBoolean = new boolean[documentIDMap.size()];
i = 0;
while (i < compositeIDArray.length)
{
String compositeDocID = compositeIDArray[i];
DocumentDescription dd = (DocumentDescription)documentIDMap.get(compositeDocID);
// Determine whether we can delete it from the index or not
rvalBoolean[i] = (allowedDocIds.get(compositeDocID) != null);
// Set the record status to "being cleaned" and return it
rval[i++] = dd;
jobQueue.updateActiveRecord(dd.getID(),((Integer)statusMap.get(compositeDocID)).intValue());
}
TrackerClass.notePrecommit();
database.performCommit();
TrackerClass.noteCommit();
return new DocumentSetAndFlags(rval, rvalBoolean);
}
catch (ManifoldCFException e)
{
database.signalRollback();
TrackerClass.noteRollback();
if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT)
{
if (Logging.perf.isDebugEnabled())
Logging.perf.debug("Aborted transaction finding docs to expire: "+e.getMessage());
sleepAmt = getRandomAmount();
continue;
}
throw e;
}
catch (Error e)
{
database.signalRollback();
TrackerClass.noteRollback();
throw e;
}
finally
{
database.endTransaction();
}
}
finally
{
lockManager.leaveWriteLock(expireStufferLock);
sleepFor(sleepAmt);
}
}
}
// This method supports the "queue stuffer" thread
/** Get up to the next n document(s) to be fetched and processed.
* This fetch returns records that contain the document identifier, plus all instructions
* pertaining to the document's handling (e.g. whether it should be refetched if the version
* has not changed).
* This method also marks the documents whose descriptions have been returned as "being processed".
*@param n is the maximum number of records desired.
*@param currentTime is the current time; some fetches do not occur until a specific time.
*@param interval is the number of milliseconds that this set of documents should represent (for throttling).
*@param blockingDocuments is the place to record documents that were encountered, are eligible for reprioritization,
* but could not be queued due to throttling considerations.
*@param statistics are the current performance statistics per connection, which are used to balance the queue stuffing
* so that individual connections are not overwhelmed.
*@param scanRecord retains the bins from all documents encountered from the query, even those that were skipped due
* to being overcommitted.
*@return the array of document descriptions to fetch and process.
*/
public DocumentDescription[] getNextDocuments(int n, long currentTime, long interval,
BlockingDocuments blockingDocuments, PerformanceStatistics statistics,
DepthStatistics scanRecord)
throws ManifoldCFException
{
// NOTE WELL: Jobs that are throttled must control the number of documents that are fetched in
// a given interval. Therefore, the returned result has the following constraints on it:
// 1) There must be no more than n documents returned total;
// 2) For any given job that is throttled, the total number of documents returned must be
// consistent with the time interval provided.
// In general, this requires the database layer to perform fairly advanced filtering on
// the result, far in excess of a simple count. An implementation of an interface is therefore
// going to need to be passed into the performQuery() operation, which prunes the resultset
// as it is being read into memory. That's a new feature that will need to be added to the
// database layer.
// Screening query
// Moved outside of transaction, so there's less chance of keeping jobstatus cache key tied up
// for an extended period of time.
if (!jobs.activeJobsPresent())
return new DocumentDescription[0];
long startTime = 0L;
if (Logging.perf.isDebugEnabled())
{
startTime = System.currentTimeMillis();
Logging.perf.debug("Waiting to find documents to queue");
}
// Below there used to be one large transaction, with multiple read sections and multiple write sections.
// As part of reducing the chance of postgresql encountering deadlock conditions, I wanted to break this
// transaction up. However, the transaction depended for its correctness in throttling on making sure
// that the throttles that were built were based on the same active jobs that the subsequent queries
// that did the stuffing relied upon. This made reorganization impossible until I realized that with
// Postgresql's way of doing transaction isolation this was going to happen anyway, so I needed a more
// robust solution.
//
// Specifically, I chose to change the way documents were queued so that only documents from properly
// throttled jobs could be queued. That meant I needed to add stuff to the ThrottleLimit class to track
// the very knowledge of an active job. This had the additional benefit of meaning there was no chance of
// a query occurring from inside a resultset filter.
//
// But, after I did this, it was no longer necessary to have such a large transaction either.
// Anything older than 10 minutes ago is considered eligible for reprioritization.
long prioritizationTime = currentTime - 60000L * 10L;
ThrottleLimit vList = new ThrottleLimit(n,prioritizationTime);
IResultSet jobconnections = jobs.getActiveJobConnections();
HashMap connectionSet = new HashMap();
int i = 0;
while (i < jobconnections.getRowCount())
{
IResultRow row = jobconnections.getRow(i++);
Long jobid = (Long)row.getValue("jobid");
String connectionName = (String)row.getValue("connectionname");
vList.addJob(jobid,connectionName);
connectionSet.put(connectionName,connectionName);
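// connectionSet is used purely as a set here; its keys are the distinct connection names across all active jobs.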
}
// Find the active connection names. We'll load these, and then get throttling info
// from each one.
String[] activeConnectionNames = new String[connectionSet.size()];
Iterator iter = connectionSet.keySet().iterator();
i = 0;
while (iter.hasNext())
{
activeConnectionNames[i++] = (String)iter.next();
}
IRepositoryConnection[] connections = connectionMgr.loadMultiple(activeConnectionNames);
// Accumulate a sum of the max_connection_count * avg_connection_rate values, so we can calculate the appropriate adjustment
// factor and set the connection limits.
HashMap rawFetchCounts = new HashMap();
double rawFetchCountTotal = 0.0;
i = 0;
while (i < connections.length)
{
IRepositoryConnection connection = connections[i++];
String connectionName = connection.getName();
int maxConnections = connection.getMaxConnections();
double avgFetchRate = statistics.calculateConnectionFetchRate(connectionName);
double weightedRawFetchCount = avgFetchRate * (double)maxConnections;
// Keep the avg rate for later use, since it may get updated before next time we need it.
rawFetchCounts.put(connectionName,new Double(weightedRawFetchCount));
rawFetchCountTotal += weightedRawFetchCount;
}
// Calculate an adjustment factor
double fetchCountAdjustmentFactor = ((double)n) / rawFetchCountTotal;
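// The factor rescales each connection's weighted fetch count so that, before the per-connection floor below
// is applied, the counts sum to n. (This presumes rawFetchCountTotal is nonzero, i.e. at least one
// connection reported a nonzero average fetch rate.)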
// For each job, we must amortize the maximum number of fetches per ms to the actual interval,
// and also randomly select an extra fetch based on the fractional probability. (This latter is
// necessary for the case where the maximum fetch rate is specified to be pretty low.)
//
i = 0;
while (i < connections.length)
{
IRepositoryConnection connection = connections[i++];
String connectionName = connection.getName();
// Check if throttled...
String[] throttles = connection.getThrottles();
int k = 0;
while (k < throttles.length)
{
// The key is the regexp value itself
String throttle = throttles[k++];
float throttleValue = connection.getThrottleValue(throttle);
// For the given connection, set the fetch limit per bin. This is calculated using the time interval
// and the desired fetch rate. The fractional remainder is used to conditionally provide an "extra fetch"
// on a weighted random basis.
//
// In the future, the connection may specify tuples which pair a regexp describing a set of bins against
// a fetch rate. In that case, each fetch rate would need to be turned into a precise maximum
// count.
double fetchesPerTimeInterval = (double)throttleValue * (double)interval;
// Actual amount will be the integer value of this, plus an additional 1 if the random number aligns
int fetches = (int)fetchesPerTimeInterval;
fetchesPerTimeInterval -= (double)fetches;
if (random.nextDouble() <= fetchesPerTimeInterval)
fetches++;
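// For example, a computed rate of 2.3 fetches per interval yields 2 fetches here, plus a 30% chance of a third.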
// Save the limit in the ThrottleLimit structure
vList.addLimit(connectionName,throttle,fetches);
}
// For the overall connection, we also have a limit which is based on the number of connections there are actually available.
Double weightedRawFetchCount = (Double)rawFetchCounts.get(connectionName);
double adjustedFetchCount = weightedRawFetchCount.doubleValue() * fetchCountAdjustmentFactor;
// Note well: Queuing starvation that results from there being very few available documents for high-priority connections is dealt with here by simply allowing
// the stuffer thread to keep queuing documents until there are enough. This will be pretty inefficient if there's an active connection that is fast and has lots
// of available connection handles while the bulk of the activity is on slow, highly handle-limited connections, but I honestly can't think of a better way at the moment.
// One good way to correct a bit for this problem is to set a higher document count floor for each connection - say 5 documents - then we won't loop as much.
//
// Be off in the higher direction rather than the lower; this also prohibits zero values and sets a minimum.
int fetchCount = ((int)adjustedFetchCount) + 5;
vList.setConnectionLimit(connectionName,fetchCount);
}
if (Logging.perf.isDebugEnabled())
Logging.perf.debug("After "+new Long(System.currentTimeMillis()-startTime).toString()+" ms, beginning query to look for documents to queue");
// System.out.println("Done building throttle structure");
// Locate records.
// Note that we do NOT want to get everything there is to know about the job
// using this query, since the file specification may be large and expensive
// to parse. We will load a (cached) copy of the job description for that purpose.
//
// NOTE: This query deliberately excludes documents which may be being processed by another job.
// (It actually excludes a bit more than that, because the exact query is impossible to write given
// the fact that document id's cannot be compared.) These are documents where there is ANOTHER
// document entry with the same hash value, a different job id, and a status which is either "active",
// "activepurgatory", or "beingdeleted". (It does not check whether the jobs have the same connection or
// whether the document id's are in fact the same, and therefore may temporarily block legitimate document
// activity under rare circumstances.)
//
// The query I want is:
// SELECT jobid,docid,status FROM jobqueue t0 WHERE status IN ('P','G') AND checktime <=xxx
// AND EXISTS(SELECT 'x' FROM
// jobs t1 WHERE t0.jobid=t1.id AND t1.status='A')
// AND NOT EXISTS(SELECT 'x' FROM jobqueue t2 WHERE t0.hashval=t2.hashval AND t0.jobid!=t2.jobid
// AND t2.status IN ('A','F','D'))
// ORDER BY docpriority ASC LIMIT xxx
//
// NOTE WELL: The above query did just fine until adaptive recrawling was seriously tried. Then, because every
// document in a job was still active, it failed miserably, actually causing Postgresql to stop responding at
// one point. Why? Well, the key thing is the sort criteria - there just isn't any way to sort 1M documents
// without working with a monster resultset.
//
// I introduced a new index as a result - based solely on docpriority - and postgresql now correctly uses that index
// to pull its results in an ordered fashion.
//
//
// Another subtlety is that I *must* mark the documents active as I find them, so that they do not
// have any chance of getting returned twice.
// Accumulate the answers here
ArrayList answers = new ArrayList();
// The current time value
Long currentTimeValue = new Long(currentTime);
// Jobqueue was formerly analyzed before this query, since stuffing could otherwise get a bad plan, interfering with performance.
// This turned out to be needed in postgresql 8.3, even though 8.2 worked fine.
//jobQueue.unconditionallyAnalyzeTables();
// Loop through priority values
int currentPriority = 1;
boolean isDone = false;
while (!isDone && currentPriority <= 10)
{
if (jobs.hasPriorityJobs(currentPriority))
{
Long currentPriorityValue = new Long((long)currentPriority);
fetchAndProcessDocuments(answers,currentTimeValue,currentPriorityValue,vList,connections);
isDone = !vList.checkContinue();
}
currentPriority++;
}
// Assert the blocking documents we discovered
vList.tallyBlockingDocuments(blockingDocuments);
// Convert the saved answers to an array
DocumentDescription[] rval = new DocumentDescription[answers.size()];
i = 0;
while (i < rval.length)
{
rval[i] = (DocumentDescription)answers.get(i);
i++;
}
// After we're done pulling stuff from the queue, find the eligible row with the best priority on the queue, and save the bins for assessment.
// This done to decide what the "floor" bincount should be - the idea being that it is wrong to assign priorities for new documents which are
// higher than the current level that is currently being dequeued.
//
// The complicating factor here is that there are indeed many potential *classes* of documents, each of which might have its own current
// document priority level. For example, documents could be classed by job, which might make sense because there is a possibility that two jobs'
// job priorities may differ. Also, because of document fetch scheduling, each time frame may represent a class in its own right as well.
// These classes would have to be associated with independent bin counts, if we were to make any use of them. Then, it would be also necessary
// to know what classes a document belonged to in order to be able to calculate its priority.
//
// An alternative way to proceed is to just have ONE class, and document priorities then get assigned without regard to job, queuing time, etc.
// That's the current reality. The code below works in that model, knowing full well that it is an approximation to an ideal.
// Find the one row from a live job that has the best document priority, which is available within the current time window.
// Note that if there is NO such document, it means we were able to queue all eligible documents, and thus prioritization is probably not even
// germane at the moment.
StringBuilder sb = new StringBuilder("SELECT ");
ArrayList list = new ArrayList();
sb.append(jobQueue.docPriorityField).append(",").append(jobQueue.jobIDField).append(",")
.append(jobQueue.docHashField).append(",").append(jobQueue.docIDField)
.append(" FROM ").append(jobQueue.getTableName())
.append(" t0 ").append(jobQueue.getGetNextDocumentsIndexHint()).append(" WHERE ");
sb.append(database.buildConjunctionClause(list,new ClauseDescription[]{
//new UnitaryClause(jobQueue.docPriorityField,">=",new Long(0L)),
new MultiClause(jobQueue.statusField,
new Object[]{jobQueue.statusToString(JobQueue.STATUS_PENDING),
jobQueue.statusToString(JobQueue.STATUS_PENDINGPURGATORY)}),
new UnitaryClause(jobQueue.checkActionField,"=",jobQueue.actionToString(JobQueue.ACTION_RESCAN)),
new UnitaryClause(jobQueue.checkTimeField,"<=",currentTimeValue)})).append(" AND ");
sb.append("EXISTS(SELECT 'x' FROM ").append(jobs.getTableName()).append(" t1 WHERE ")
.append(database.buildConjunctionClause(list,new ClauseDescription[]{
new MultiClause("t1."+jobs.statusField,new Object[]{
Jobs.statusToString(jobs.STATUS_ACTIVE),
Jobs.statusToString(jobs.STATUS_ACTIVESEEDING)}),
new JoinClause("t1."+jobs.idField,"t0."+jobQueue.jobIDField)}))
.append(") ");
sb.append(" ").append(database.constructIndexOrderByClause(new String[]{
jobQueue.docPriorityField, jobQueue.statusField, jobQueue.checkActionField, jobQueue.checkTimeField},
true)).append(" ")
.append(database.constructOffsetLimitClause(0,1,true));
IResultSet set = database.performQuery(sb.toString(),list,null,null,1,null);
if (set.getRowCount() > 0)
{
IResultRow row = set.getRow(0);
Double docPriority = (Double)row.getValue(jobQueue.docPriorityField);
if (docPriority != null && docPriority.doubleValue() < jobQueue.noDocPriorityValue)
scanRecord.addBins(docPriority);
}
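// Recording the best remaining priority gives the prioritization logic its floor: per the discussion above,
// newly discovered documents should not be assigned priorities better than what is still waiting on the queue.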
return rval;
}
/** Fetch and process documents matching the passed-in criteria */
protected void fetchAndProcessDocuments(ArrayList answers, Long currentTimeValue, Long currentPriorityValue,
ThrottleLimit vList, IRepositoryConnection[] connections)
throws ManifoldCFException
{
// Note well: This query does not do "FOR UPDATE". The reason is that only one thread can possibly change the document's state to active.
// When FOR UPDATE was included, deadlock conditions were common because of the complexity of this query.
// So, instead, as part of CONNECTORS-781, I've introduced a write lock for the pertinent section.
ArrayList list = new ArrayList();
StringBuilder sb = new StringBuilder("SELECT t0.");
sb.append(jobQueue.idField).append(",t0.");
if (Logging.scheduling.isDebugEnabled())
sb.append(jobQueue.docPriorityField).append(",t0.");
sb.append(jobQueue.jobIDField).append(",t0.")
.append(jobQueue.docHashField).append(",t0.")
.append(jobQueue.docIDField).append(",t0.")
.append(jobQueue.statusField).append(",t0.")
.append(jobQueue.failTimeField).append(",t0.")
.append(jobQueue.failCountField).append(",t0.")
.append(jobQueue.prioritySetField).append(" FROM ").append(jobQueue.getTableName())
.append(" t0 ").append(jobQueue.getGetNextDocumentsIndexHint()).append(" WHERE ");
sb.append(database.buildConjunctionClause(list,new ClauseDescription[]{
//new UnitaryClause("t0."+jobQueue.docPriorityField,">=",new Long(0L)),
new MultiClause("t0."+jobQueue.statusField,new Object[]{
jobQueue.statusToString(JobQueue.STATUS_PENDING),
jobQueue.statusToString(JobQueue.STATUS_PENDINGPURGATORY)}),
new UnitaryClause("t0."+jobQueue.checkActionField,"=",jobQueue.actionToString(JobQueue.ACTION_RESCAN)),
new UnitaryClause("t0."+jobQueue.checkTimeField,"<=",currentTimeValue)})).append(" AND ");
sb.append("EXISTS(SELECT 'x' FROM ").append(jobs.getTableName()).append(" t1 WHERE ")
.append(database.buildConjunctionClause(list,new ClauseDescription[]{
new MultiClause("t1."+jobs.statusField,new Object[]{
Jobs.statusToString(jobs.STATUS_ACTIVE),
Jobs.statusToString(jobs.STATUS_ACTIVESEEDING)}),
new JoinClause("t1."+jobs.idField,"t0."+jobQueue.jobIDField),
new UnitaryClause("t1."+jobs.priorityField,currentPriorityValue)}))
.append(") AND ");
sb.append("NOT EXISTS(SELECT 'x' FROM ").append(jobQueue.getTableName()).append(" t2 WHERE ")
.append(database.buildConjunctionClause(list,new ClauseDescription[]{
new JoinClause("t2."+jobQueue.docHashField,"t0."+jobQueue.docHashField)})).append(" AND ")
.append("t2.").append(jobQueue.statusField).append(" IN (?,?,?,?,?,?) AND ")
.append("t2.").append(jobQueue.jobIDField).append("!=t0.").append(jobQueue.jobIDField)
.append(") AND ");
list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVE));
list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVEPURGATORY));
list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVENEEDRESCAN));
list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVENEEDRESCANPURGATORY));
list.add(jobQueue.statusToString(jobQueue.STATUS_BEINGDELETED));
list.add(jobQueue.statusToString(jobQueue.STATUS_BEINGCLEANED));
// Prerequisite event clause: AND NOT EXISTS(SELECT 'x' FROM prereqevents t3,events t4 WHERE t3.ownerid=t0.id AND t3.name=t4.name)
sb.append("NOT EXISTS(SELECT 'x' FROM ").append(jobQueue.prereqEventManager.getTableName()).append(" t3,").append(eventManager.getTableName()).append(" t4 WHERE t0.")
.append(jobQueue.idField).append("=t3.").append(jobQueue.prereqEventManager.ownerField).append(" AND t3.")
.append(jobQueue.prereqEventManager.eventNameField).append("=t4.").append(eventManager.eventNameField)
.append(")");
sb.append(" ").append(database.constructIndexOrderByClause(new String[]{
"t0."+jobQueue.docPriorityField, "t0."+jobQueue.statusField, "t0."+jobQueue.checkActionField, "t0."+jobQueue.checkTimeField},
true)).append(" ");
// Before entering the transaction, we must provide the throttlelimit object with all the connector
// instances it could possibly need. The purpose for doing this is to prevent a deadlock where
// connector starvation causes database lockup.
//
// The preallocation of multiple connector instances is certainly a worry. If any other part
// of the code allocates multiple connector instances also, the potential exists for this to cause
// deadlock all by itself. I've therefore built a "grab multiple" and a "release multiple"
// at the connector factory level to make sure these requests are properly ordered.
String[] orderingKeys = new String[connections.length];
String[] classNames = new String[connections.length];
ConfigParams[] configParams = new ConfigParams[connections.length];
int[] maxConnections = new int[connections.length];
int k = 0;
while (k < connections.length)
{
IRepositoryConnection connection = connections[k];
orderingKeys[k] = connection.getName();
classNames[k] = connection.getClassName();
configParams[k] = connection.getConfigParams();
maxConnections[k] = connection.getMaxConnections();
k++;
}
// Never sleep with a resource locked!
while (true)
{
long sleepAmt = 0L;
// Write lock ensures that only one thread cluster-wide can be doing this at a given time, so FOR UPDATE is unneeded.
lockManager.enterWriteLock(stufferLock);
try
{
IRepositoryConnector[] connectors = RepositoryConnectorFactory.grabMultiple(threadContext,orderingKeys,classNames,configParams,maxConnections);
try
{
// Hand the connectors off to the ThrottleLimit instance
k = 0;
while (k < connections.length)
{
vList.addConnectionName(connections[k].getName(),connectors[k]);
k++;
}
// Now we can tack the limit onto the query. Before this point, remainingDocuments would not yet be meaningful.
int limitValue = vList.getRemainingDocuments();
sb.append(database.constructOffsetLimitClause(0,limitValue,true));
if (Logging.perf.isDebugEnabled())
{
Logging.perf.debug("Queuing documents from time "+currentTimeValue.toString()+" job priority "+currentPriorityValue.toString()+
" (up to "+Integer.toString(vList.getRemainingDocuments())+" documents)");
}
database.beginTransaction();
try
{
IResultSet set = database.performQuery(sb.toString(),list,null,null,-1,vList);
if (Logging.perf.isDebugEnabled())
Logging.perf.debug(" Queuing "+Integer.toString(set.getRowCount())+" documents");
// To avoid deadlock, we want to update the document id hashes in order. This means reading into a structure I can sort by docid hash,
// before updating any rows in jobqueue.
String[] docIDHashes = new String[set.getRowCount()];
Map storageMap = new HashMap();
Map statusMap = new HashMap();
int i = 0;
while (i < set.getRowCount())
{
IResultRow row = set.getRow(i);
Long id = (Long)row.getValue(jobQueue.idField);
Long jobID = (Long)row.getValue(jobQueue.jobIDField);
String docIDHash = (String)row.getValue(jobQueue.docHashField);
String docID = (String)row.getValue(jobQueue.docIDField);
int status = jobQueue.stringToStatus(row.getValue(jobQueue.statusField).toString());
Long failTimeValue = (Long)row.getValue(jobQueue.failTimeField);
Long failCountValue = (Long)row.getValue(jobQueue.failCountField);
long failTime;
if (failTimeValue == null)
failTime = -1L;
else
failTime = failTimeValue.longValue();
int failCount;
if (failCountValue == null)
failCount = -1;
else
failCount = (int)failCountValue.longValue();
DocumentDescription dd = new DocumentDescription(id,jobID,docIDHash,docID,failTime,failCount);
docIDHashes[i] = docIDHash + ":" + jobID;
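// The job id is appended so that the same document hash queued under two different jobs yields distinct sort keys.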
storageMap.put(docIDHashes[i],dd);
statusMap.put(docIDHashes[i],new Integer(status));
if (Logging.scheduling.isDebugEnabled())
{
Double docPriority = (Double)row.getValue(jobQueue.docPriorityField);
Logging.scheduling.debug("Stuffing document '"+docID+"' that has priority "+docPriority.toString()+" onto active list");
}
i++;
}
// No duplicates are possible here
java.util.Arrays.sort(docIDHashes);
i = 0;
while (i < docIDHashes.length)
{
String docIDHash = docIDHashes[i];
DocumentDescription dd = (DocumentDescription)storageMap.get(docIDHash);
Long id = dd.getID();
int status = ((Integer)statusMap.get(docIDHash)).intValue();
// Set status to "ACTIVE".
jobQueue.updateActiveRecord(id,status);
answers.add(dd);
i++;
}
TrackerClass.notePrecommit();
database.performCommit();
TrackerClass.noteCommit();
break;
}
catch (ManifoldCFException e)
{
database.signalRollback();
if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT)
{
if (Logging.perf.isDebugEnabled())
Logging.perf.debug("Aborted transaction finding docs to queue: "+e.getMessage());
sleepAmt = getRandomAmount();
continue;
}
throw e;
}
catch (Error e)
{
database.signalRollback();
throw e;
}
finally
{
database.endTransaction();
}
}
finally
{
RepositoryConnectorFactory.releaseMultiple(connectors);
}
}
finally
{
lockManager.leaveWriteLock(stufferLock);
sleepFor(sleepAmt);
}
}
}
// These methods support the individual fetch/process threads.
/** Verify that a specific job is indeed still active. This is used to permit abort or pause to be relatively speedy.
* The query done within MUST be cached in order to not cause undue performance degradation.
*@param jobID is the job identifier.
*@return true if the job is in one of the "active" states.
*/
public boolean checkJobActive(Long jobID)
throws ManifoldCFException
{
return jobs.checkJobActive(jobID);
}
/** Verify if a job is still processing documents, or no longer has any outstanding active documents */
public boolean checkJobBusy(Long jobID)
throws ManifoldCFException
{
return jobQueue.checkJobBusy(jobID);
}
/** Note the completion of processing of a document by a job thread.
* This method causes the state of the document to be marked as "completed".
*@param documentDescriptions are the description objects for the documents that were processed.
*/
public void markDocumentCompletedMultiple(DocumentDescription[] documentDescriptions)
throws ManifoldCFException
{
// Before we can change a document status, we need to know the *current* status. Therefore, a SELECT xxx FOR UPDATE/UPDATE
// transaction is needed in order to complete these documents correctly.
//
// Since we are therefore setting row locks on the jobqueue table, we need to work to avoid unnecessary deadlocking. To do that, we have to
// lock rows in document id hash order!! Luckily, the DocumentDescription objects have a document identifier buried within, which we can use to
// order the "select for update" operations appropriately.
//
HashMap indexMap = new HashMap();
String[] docIDHashes = new String[documentDescriptions.length];
int i = 0;
while (i < documentDescriptions.length)
{
String documentIDHash = documentDescriptions[i].getDocumentIdentifierHash() + ":" + documentDescriptions[i].getJobID();
docIDHashes[i] = documentIDHash;
indexMap.put(documentIDHash,new Integer(i));
i++;
}
java.util.Arrays.sort(docIDHashes);
// Retry loop - in case we get a deadlock despite our best efforts
while (true)
{
long sleepAmt = 0L;
// Start the transaction now
database.beginTransaction();
try
{
// Do one row at a time, to avoid deadlocking things
i = 0;
while (i < docIDHashes.length)
{
String docIDHash = docIDHashes[i];
// Get the DocumentDescription object
DocumentDescription dd = documentDescriptions[((Integer)indexMap.get(docIDHash)).intValue()];
// Query for the status
ArrayList list = new ArrayList();
String query = database.buildConjunctionClause(list,new ClauseDescription[]{
new UnitaryClause(jobQueue.idField,dd.getID())});
TrackerClass.notePreread(dd.getID());
IResultSet set = database.performQuery("SELECT "+jobQueue.statusField+" FROM "+jobQueue.getTableName()+" WHERE "+
query+" FOR UPDATE",list,null,null);
TrackerClass.noteRead(dd.getID());
if (set.getRowCount() > 0)
{
IResultRow row = set.getRow(0);
// Grab the status
int status = jobQueue.stringToStatus((String)row.getValue(jobQueue.statusField));
// Update the jobqueue table
jobQueue.updateCompletedRecord(dd.getID(),status);
}
i++;
}
TrackerClass.notePrecommit();
database.performCommit();
TrackerClass.noteCommit();
break;
}
catch (ManifoldCFException e)
{
database.signalRollback();
TrackerClass.noteRollback();
if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT)
{
if (Logging.perf.isDebugEnabled())
Logging.perf.debug("Aborted transaction marking completed "+Integer.toString(docIDHashes.length)+
" docs: "+e.getMessage());
sleepAmt = getRandomAmount();
continue;
}
throw e;
}
catch (Error e)
{
database.signalRollback();
TrackerClass.noteRollback();
throw e;
}
finally
{
database.endTransaction();
sleepFor(sleepAmt);
}
}
}
/** Note the completion of processing of a document by a job thread.
* This method causes the state of the document to be marked as "completed".
*@param documentDescription is the description object for the document that was processed.
*/
public void markDocumentCompleted(DocumentDescription documentDescription)
throws ManifoldCFException
{
markDocumentCompletedMultiple(new DocumentDescription[]{documentDescription});
}
/** Delete from queue as a result of processing of an active document.
* The document is expected to be in one of the active states: ACTIVE, ACTIVESEEDING,
* ACTIVENEEDSRESCAN, ACTIVESEEDINGNEEDSRESCAN. The RESCAN variants are interpreted
* as meaning that the document should not be deleted, but should instead be popped back on the queue for
* a repeat processing attempt.
*@param documentDescriptions are the set of description objects for the documents that were processed.
*@param hopcountMethod describes how to handle deletions for hopcount purposes.
*@return the set of documents for which carrydown data was changed by this operation. These documents are likely
* to be requeued as a result of the change.
*/
public DocumentDescription[] markDocumentDeletedMultiple(Long jobID, String[] legalLinkTypes, DocumentDescription[] documentDescriptions,
int hopcountMethod)
throws ManifoldCFException
{
// It's no longer an issue to have to deal with documents being conditionally deleted; that's been
// taken over by the hopcountremoval method below. So just use the simple 'delete' functionality.
return doDeleteMultiple(jobID,legalLinkTypes,documentDescriptions,hopcountMethod);
}
/** Delete from queue as a result of processing of an active document.
* The document is expected to be in one of the active states: ACTIVE, ACTIVESEEDING,
* ACTIVENEEDSRESCAN, ACTIVESEEDINGNEEDSRESCAN. The RESCAN variants are interpreted
* as meaning that the document should not be deleted, but should instead be popped back on the queue for
* a repeat processing attempt.
*@param documentDescription is the description object for the document that was processed.
*@param hopcountMethod describes how to handle deletions for hopcount purposes.
*@return the set of documents for which carrydown data was changed by this operation. These documents are likely
* to be requeued as a result of the change.
*/
public DocumentDescription[] markDocumentDeleted(Long jobID, String[] legalLinkTypes, DocumentDescription documentDescription,
int hopcountMethod)
throws ManifoldCFException
{
return markDocumentDeletedMultiple(jobID,legalLinkTypes,new DocumentDescription[]{documentDescription},hopcountMethod);
}
/** Mark hopcount removal from queue as a result of processing of an active document.
* The document is expected to be in one of the active states: ACTIVE, ACTIVESEEDING,
* ACTIVENEEDSRESCAN, ACTIVESEEDINGNEEDSRESCAN. The RESCAN variants are interpreted
* as meaning that the document should not be marked as removed, but should instead be popped back on the queue for
* a repeat processing attempt.
*@param documentDescriptions are the set of description objects for the documents that were processed.
*@param hopcountMethod describes how to handle deletions for hopcount purposes.
*@return the set of documents for which carrydown data was changed by this operation. These documents are likely
* to be requeued as a result of the change.
*/
public DocumentDescription[] markDocumentHopcountRemovalMultiple(Long jobID, String[] legalLinkTypes, DocumentDescription[] documentDescriptions,
int hopcountMethod)
throws ManifoldCFException
{
// For each record, we're going to have to choose between marking it as "hopcount removed", and marking
// it for rescan. So the basic flow will involve changing a document's status.
// Before we can change a document status, we need to know the *current* status. Therefore, a SELECT xxx FOR UPDATE/UPDATE
// transaction is needed in order to complete these documents correctly.
//
// Since we are therefore setting row locks on the jobqueue table, we need to work to avoid unnecessary deadlocking. To do that, we have to
// lock rows in document id hash order!! Luckily, the DocumentDescription objects have a document identifier buried within, which we can use to
// order the "select for update" operations appropriately.
//
HashMap indexMap = new HashMap();
String[] docIDHashes = new String[documentDescriptions.length];
int i = 0;
while (i < documentDescriptions.length)
{
String documentIDHash = documentDescriptions[i].getDocumentIdentifierHash() + ":" + documentDescriptions[i].getJobID();
docIDHashes[i] = documentIDHash;
indexMap.put(documentIDHash,new Integer(i));
i++;
}
java.util.Arrays.sort(docIDHashes);
// Retry loop - in case we get a deadlock despite our best efforts
while (true)
{
long sleepAmt = 0L;
database.beginTransaction(database.TRANSACTION_SERIALIZED);
try
{
// Do one row at a time, to avoid deadlocking things
List<String> deleteList = new ArrayList<String>();
i = 0;
while (i < docIDHashes.length)
{
String docIDHash = docIDHashes[i];
// Get the DocumentDescription object
DocumentDescription dd = documentDescriptions[((Integer)indexMap.get(docIDHash)).intValue()];
// Query for the status
ArrayList list = new ArrayList();
String query = database.buildConjunctionClause(list,new ClauseDescription[]{
new UnitaryClause(jobQueue.idField,dd.getID())});
TrackerClass.notePreread(dd.getID());
IResultSet set = database.performQuery("SELECT "+jobQueue.statusField+" FROM "+jobQueue.getTableName()+" WHERE "+
query+" FOR UPDATE",list,null,null);
TrackerClass.noteRead(dd.getID());
if (set.getRowCount() > 0)
{
IResultRow row = set.getRow(0);
// Grab the status
int status = jobQueue.stringToStatus((String)row.getValue(jobQueue.statusField));
// Update the jobqueue table
boolean didDelete = jobQueue.updateOrHopcountRemoveRecord(dd.getID(),status);
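// didDelete signals that the record was actually removed (hopcount removal) rather than requeued for rescan;
// only removed records need their carrydown children reassessed below.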
if (didDelete)
{
deleteList.add(dd.getDocumentIdentifierHash());
}
}
i++;
}
String[] docIDSimpleHashes = new String[deleteList.size()];
for (int j = 0; j < docIDSimpleHashes.length; j++)
{
docIDSimpleHashes[j] = deleteList.get(j);
}
// Next, find the documents that are affected by carrydown deletion.
DocumentDescription[] rval = calculateAffectedDeleteCarrydownChildren(jobID,docIDSimpleHashes);
// Since hopcount inheritance and prerequisites came from the addDocument() method,
// we don't delete them here.
TrackerClass.notePrecommit();
database.performCommit();
TrackerClass.noteCommit();
return rval;
}
catch (ManifoldCFException e)
{
database.signalRollback();
TrackerClass.noteRollback();
if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT)
{
if (Logging.perf.isDebugEnabled())
Logging.perf.debug("Aborted transaction marking completed "+Integer.toString(docIDHashes.length)+
" docs: "+e.getMessage());
sleepAmt = getRandomAmount();
continue;
}
throw e;
}
catch (Error e)
{
database.signalRollback();
TrackerClass.noteRollback();
throw e;
}
finally
{
database.endTransaction();
sleepFor(sleepAmt);
}
}
}
/** Mark hopcount removal from queue as a result of processing of an active document.
* The document is expected to be in one of the active states: ACTIVE, ACTIVESEEDING,
* ACTIVENEEDSRESCAN, ACTIVESEEDINGNEEDSRESCAN. The RESCAN variants are interpreted
* as meaning that the document should not be marked as removed, but should instead be popped back on the queue for
* a repeat processing attempt.
*@param documentDescription is the description object for the document that was processed.
*@param hopcountMethod describes how to handle deletions for hopcount purposes.
*@return the set of documents for which carrydown data was changed by this operation. These documents are likely
* to be requeued as a result of the change.
*/
public DocumentDescription[] markDocumentHopcountRemoval(Long jobID, String[] legalLinkTypes, DocumentDescription documentDescription,
int hopcountMethod)
throws ManifoldCFException
{
return markDocumentHopcountRemovalMultiple(jobID,legalLinkTypes,new DocumentDescription[]{documentDescription},hopcountMethod);
}
/** Delete from queue as a result of expiration of an active document.
* The document is expected to be in one of the active states: ACTIVE, ACTIVESEEDING,
* ACTIVENEEDSRESCAN, ACTIVESEEDINGNEEDSRESCAN. Since the document expired,
* no special activity takes place as a result of the document being in a RESCAN state.
*@param documentDescriptions are the set of description objects for the documents that were processed.
*@param hopcountMethod describes how to handle deletions for hopcount purposes.
*@return the set of documents for which carrydown data was changed by this operation. These documents are likely
* to be requeued as a result of the change.
*/
public DocumentDescription[] markDocumentExpiredMultiple(Long jobID, String[] legalLinkTypes, DocumentDescription[] documentDescriptions,
int hopcountMethod)
throws ManifoldCFException
{
return doDeleteMultiple(jobID,legalLinkTypes,documentDescriptions,hopcountMethod);
}
/** Delete from queue as a result of expiration of an active document.
* The document is expected to be in one of the active states: ACTIVE, ACTIVESEEDING,
* ACTIVENEEDSRESCAN, ACTIVESEEDINGNEEDSRESCAN. Since the document expired,
* no special activity takes place as a result of the document being in a RESCAN state.
*@param documentDescription is the description object for the document that was processed.
*@param hopcountMethod describes how to handle deletions for hopcount purposes.
*@return the set of documents for which carrydown data was changed by this operation. These documents are likely
* to be requeued as a result of the change.
*/
public DocumentDescription[] markDocumentExpired(Long jobID, String[] legalLinkTypes, DocumentDescription documentDescription,
int hopcountMethod)
throws ManifoldCFException
{
return markDocumentExpiredMultiple(jobID,legalLinkTypes,new DocumentDescription[]{documentDescription},hopcountMethod);
}
/** Delete from queue as a result of cleaning up an unreachable document.
* The document is expected to be in the PURGATORY state. There is never any need to reprocess the
* document.
*@param documentDescriptions are the set of description objects for the documents that were processed.
*@param hopcountMethod describes how to handle deletions for hopcount purposes.
*@return the set of documents for which carrydown data was changed by this operation. These documents are likely
* to be requeued as a result of the change.
*/
public DocumentDescription[] markDocumentCleanedUpMultiple(Long jobID, String[] legalLinkTypes, DocumentDescription[] documentDescriptions,
int hopcountMethod)
throws ManifoldCFException
{
return doDeleteMultiple(jobID,legalLinkTypes,documentDescriptions,hopcountMethod);
}
/** Delete from queue as a result of cleaning up an unreachable document.
* The document is expected to be in the PURGATORY state. There is never any need to reprocess the
* document.
*@param documentDescription is the description object for the document that was processed.
*@param hopcountMethod describes how to handle deletions for hopcount purposes.
*@return the set of documents for which carrydown data was changed by this operation. These documents are likely
* to be requeued as a result of the change.
*/
public DocumentDescription[] markDocumentCleanedUp(Long jobID, String[] legalLinkTypes, DocumentDescription documentDescription,
int hopcountMethod)
throws ManifoldCFException
{
return markDocumentCleanedUpMultiple(jobID,legalLinkTypes,new DocumentDescription[]{documentDescription},hopcountMethod);
}
/** Delete documents with no repercussions. We don't have to worry about the current state of each document,
* since the document is definitely going away.
*@param documentDescriptions are the set of description objects for the documents that were processed.
*@param hopcountMethod describes how to handle deletions for hopcount purposes.
*@return the set of documents for which carrydown data was changed by this operation. These documents are likely
* to be requeued as a result of the change.
*/
protected DocumentDescription[] doDeleteMultiple(Long jobID, String[] legalLinkTypes, DocumentDescription[] documentDescriptions,
int hopcountMethod)
throws ManifoldCFException
{
if (documentDescriptions.length == 0)
return new DocumentDescription[0];
// Order of locking is not normally important here, because documents that wind up being deleted are never being worked on by anything else.
// In all cases, the state of the document excludes other activity.
// The only tricky situation is when a thread is processing a document which happens to be getting deleted, while another thread is trying to add
// a reference for the very same document to the queue. Then, order of locking matters, so the deletions should happen in a specific order to avoid
// the possibility of deadlock. Nevertheless, this is enough of a risk that I've chosen to order the deletions by document id hash order, just like everywhere
// else.
long startTime = 0L;
if (Logging.perf.isDebugEnabled())
{
startTime = System.currentTimeMillis();
Logging.perf.debug("Waiting to delete "+Integer.toString(documentDescriptions.length)+" docs and clean up hopcount for job "+jobID.toString());
}
HashMap indexMap = new HashMap();
String[] docIDHashes = new String[documentDescriptions.length];
int i = 0;
while (i < documentDescriptions.length)
{
docIDHashes[i] = documentDescriptions[i].getDocumentIdentifierHash() + ":" + documentDescriptions[i].getJobID();
indexMap.put(docIDHashes[i],new Integer(i));
i++;
}
// Sort by doc hash, to establish non-blocking lock order
java.util.Arrays.sort(docIDHashes);
DocumentDescription[] rval;
while (true)
{
long sleepAmt = 0L;
database.beginTransaction(database.TRANSACTION_SERIALIZED);
try
{
if (Logging.perf.isDebugEnabled())
Logging.perf.debug("Waited "+new Long(System.currentTimeMillis()-startTime).toString()+" ms to start deleting "+Integer.toString(docIDHashes.length)+
" docs and clean up hopcount for job "+jobID.toString());
String[] docIDSimpleHashes = new String[docIDHashes.length];
// Delete jobqueue rows FIRST. Even though we do this before assessing the carrydown implications, it is OK because it's the CHILDREN of these
// rows that might get affected by carrydown data deletion, not the rows themselves!
i = 0;
while (i < docIDHashes.length)
{
String docIDHash = docIDHashes[i];
DocumentDescription dd = documentDescriptions[((Integer)indexMap.get(docIDHash)).intValue()];
// Individual operations are necessary so order can be controlled.
jobQueue.deleteRecord(dd.getID());
docIDSimpleHashes[i] = dd.getDocumentIdentifierHash();
i++;
}
// Next, find the documents that are affected by carrydown deletion.
rval = calculateAffectedDeleteCarrydownChildren(jobID,docIDSimpleHashes);
// Finally, delete the carrydown records in question.
carryDown.deleteRecords(jobID,docIDSimpleHashes);
if (legalLinkTypes.length > 0)
hopCount.deleteDocumentIdentifiers(jobID,legalLinkTypes,docIDSimpleHashes,hopcountMethod);
database.performCommit();
if (Logging.perf.isDebugEnabled())
Logging.perf.debug("Took "+new Long(System.currentTimeMillis()-startTime).toString()+" ms to delete "+Integer.toString(docIDHashes.length)+
" docs and clean up hopcount for job "+jobID.toString());
break;
}
catch (ManifoldCFException e)
{
database.signalRollback();
if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT)
{
if (Logging.perf.isDebugEnabled())
Logging.perf.debug("Aborted transaction deleting "+Integer.toString(docIDHashes.length)+
" docs and clean up hopcount for job "+jobID.toString()+": "+e.getMessage());
sleepAmt = getRandomAmount();
continue;
}
throw e;
}
catch (Error e)
{
database.signalRollback();
throw e;
}
finally
{
database.endTransaction();
sleepFor(sleepAmt);
}
}
return rval;
}
/** Helper method: Find the document descriptions that will be affected due to carrydown row deletions.
*/
protected DocumentDescription[] calculateAffectedDeleteCarrydownChildren(Long jobID, String[] docIDHashes)
throws ManifoldCFException
{
// Break the request into pieces, as needed, and throw everything into a hash for uniqueness.
// We are going to need to break up this query into a number of subqueries, each covering a subset of parent id hashes.
// The goal is to throw all the children into a hash, to make them unique at the end.
HashMap resultHash = new HashMap();
ArrayList list = new ArrayList();
int maxCount = maxClauseProcessDeleteHashSet();
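// maxCount is the largest number of parent hashes that will fit into a single IN clause of the child-lookup query.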
int i = 0;
int z = 0;
while (i < docIDHashes.length)
{
if (z == maxCount)
{
processDeleteHashSet(jobID,resultHash,list);
list.clear();
z = 0;
}
list.add(docIDHashes[i]);
i++;
z++;
}
if (z > 0)
processDeleteHashSet(jobID,resultHash,list);
// Now, put together the result document list from the hash.
DocumentDescription[] rval = new DocumentDescription[resultHash.size()];
i = 0;
Iterator iter = resultHash.keySet().iterator();
while (iter.hasNext())
{
Long id = (Long)iter.next();
DocumentDescription dd = (DocumentDescription)resultHash.get(id);
rval[i++] = dd;
}
return rval;
}
/** Get maximum count.
*/
protected int maxClauseProcessDeleteHashSet()
{
return database.findConjunctionClauseMax(new ClauseDescription[]{
new JoinClause("t1."+carryDown.jobIDField,"t0."+jobQueue.jobIDField),
new JoinClause("t1."+carryDown.childIDHashField,"t0."+jobQueue.docHashField)});
}
/** Helper method: look up rows affected by a deleteRecords operation.
*/
protected void processDeleteHashSet(Long jobID, HashMap resultHash, ArrayList list)
throws ManifoldCFException
{
// The query here mirrors the carrydown.restoreRecords() delete query! However, it also fetches enough information to build a DocumentDescription
// object for return, and so a join is necessary against the jobqueue table.
StringBuilder sb = new StringBuilder("SELECT ");
ArrayList newList = new ArrayList();
sb.append("t0.").append(jobQueue.idField).append(",")
.append("t0.").append(jobQueue.docHashField).append(",")
.append("t0.").append(jobQueue.docIDField)
.append(" FROM ").append(carryDown.getTableName()).append(" t1, ")
.append(jobQueue.getTableName()).append(" t0 WHERE ");
sb.append(database.buildConjunctionClause(newList,new ClauseDescription[]{
new UnitaryClause("t1."+carryDown.jobIDField,jobID),
new MultiClause("t1."+carryDown.parentIDHashField,list)})).append(" AND ");
sb.append(database.buildConjunctionClause(newList,new ClauseDescription[]{
new JoinClause("t0."+jobQueue.docHashField,"t1."+carryDown.childIDHashField),
new JoinClause("t0."+jobQueue.jobIDField,"t1."+carryDown.jobIDField)}));
/*
sb.append("t0.").append(jobQueue.idField).append(",")
.append("t0.").append(jobQueue.docHashField).append(",")
.append("t0.").append(jobQueue.docIDField)
.append(" FROM ").append(jobQueue.getTableName()).append(" t0 WHERE ")
.append(database.buildConjunctionClause(newList,new ClauseDescription[]{
new UnitaryClause("t0."+jobQueue.jobIDField,jobID)})).append(" AND ");
sb.append("EXISTS(SELECT 'x' FROM ").append(carryDown.getTableName()).append(" t1 WHERE ")
.append(database.buildConjunctionClause(newList,new ClauseDescription[]{
new JoinClause("t1."+carryDown.jobIDField,"t0."+jobQueue.jobIDField),
new MultiClause("t1."+carryDown.parentIDHashField,list),
new JoinClause("t1."+carryDown.childIDHashField,"t0."+jobQueue.docHashField)}))
.append(")");
*/
IResultSet set = database.performQuery(sb.toString(),newList,null,null);
int i = 0;
while (i < set.getRowCount())
{
IResultRow row = set.getRow(i++);
Long id = (Long)row.getValue(jobQueue.idField);
String documentIdentifierHash = (String)row.getValue(jobQueue.docHashField);
String documentIdentifier = (String)row.getValue(jobQueue.docIDField);
resultHash.put(id,new DocumentDescription(id,jobID,documentIdentifierHash,documentIdentifier));
}
}
/** Requeue a document for further processing in the future.
* This method is called after a document is processed, when the job is a "continuous" one.
* It is essentially equivalent to noting that the document processing is complete, except the
* document remains on the queue.
*@param documentDescriptions is the set of description objects for the document that was processed.
*@param executeTimes are the times that the documents should be rescanned. Null indicates "never".
*@param actions are what should be done when the time arrives. Choices are ACTION_RESCAN or ACTION_REMOVE.
*/
public void requeueDocumentMultiple(DocumentDescription[] documentDescriptions, Long[] executeTimes,
int[] actions)
throws ManifoldCFException
{
String[] docIDHashes = new String[documentDescriptions.length];
Long[] ids = new Long[documentDescriptions.length];
Long[] executeTimesNew = new Long[documentDescriptions.length];
int[] actionsNew = new int[documentDescriptions.length];
// First loop maps document identifier back to an index.
HashMap indexMap = new HashMap();
int i = 0;
while (i < documentDescriptions.length)
{
docIDHashes[i] = documentDescriptions[i].getDocumentIdentifierHash() + ":" + documentDescriptions[i].getJobID();
indexMap.put(docIDHashes[i],new Integer(i));
i++;
}
// Sort!
java.util.Arrays.sort(docIDHashes);
// Next loop populates the actual arrays we use to feed the operation so that the ordering is correct.
i = 0;
while (i < docIDHashes.length)
{
String docIDHash = docIDHashes[i];
Integer x = (Integer)indexMap.remove(docIDHash);
if (x == null)
throw new ManifoldCFException("Assertion failure: duplicate document identifier jobid/hash detected!");
int index = x.intValue();
ids[i] = documentDescriptions[index].getID();
executeTimesNew[i] = executeTimes[index];
actionsNew[i] = actions[index];
i++;
}
while (true)
{
long sleepAmt = 0L;
database.beginTransaction();
try
{
// Going through ids in order should greatly reduce or eliminate chances of deadlock occurring. We thus need to pay attention to the sorted order.
i = 0;
while (i < ids.length)
{
jobQueue.setStatus(ids[i],jobQueue.STATUS_PENDINGPURGATORY,executeTimesNew[i],actionsNew[i],-1L,-1);
i++;
}
TrackerClass.notePrecommit();
database.performCommit();
TrackerClass.noteCommit();
break;
}
catch (Error e)
{
database.signalRollback();
TrackerClass.noteRollback();
throw e;
}
catch (ManifoldCFException e)
{
database.signalRollback();
TrackerClass.noteRollback();
if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT)
{
if (Logging.perf.isDebugEnabled())
Logging.perf.debug("Aborted transaction requeuing documents: "+e.getMessage());
sleepAmt = getRandomAmount();
continue;
}
throw e;
}
finally
{
database.endTransaction();
sleepFor(sleepAmt);
}
}
}
/** Requeue a document for further processing in the future.
* This method is called after a document is processed, when the job is a "continuous" one.
* It is essentially equivalent to noting that the document processing is complete, except the
* document remains on the queue.
*@param documentDescription is the description object for the document that was processed.
*@param executeTime is the time that the document should be rescanned. Null indicates "never".
*@param action is what should be done when the time arrives. Choices include ACTION_RESCAN or ACTION_REMOVE.
*/
public void requeueDocument(DocumentDescription documentDescription, Long executeTime, int action)
throws ManifoldCFException
{
requeueDocumentMultiple(new DocumentDescription[]{documentDescription},new Long[]{executeTime},new int[]{action});
}
/** Reset a set of documents for further processing in the future.
* This method is called after some unknown number of the documents were processed, but then a service interruption occurred.
* Note well: The logic here basically presumes that we cannot know whether the documents were indeed processed or not.
* If we knew for a fact that none of the documents had been handled, it would be possible to look at the document's
* current status and decide what the new status ought to be, based on a true rollback scenario. Such cases, however, are rare enough so that
* special logic is probably not worth it.
*@param documentDescriptions is the set of description objects for the document that was processed.
*@param executeTime is the time that the documents should be rescanned.
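*@param action is what should be done when the time arrives. Choices are ACTION_RESCAN or ACTION_REMOVE.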
*@param failTime is the time beyond which a service interruption will be considered a hard failure.
*@param failCount is the number of retries beyond which a service interruption will be considered a hard failure.
*/
public void resetDocumentMultiple(DocumentDescription[] documentDescriptions, long executeTime,
int action, long failTime, int failCount)
throws ManifoldCFException
{
Long executeTimeLong = new Long(executeTime);
Long[] ids = new Long[documentDescriptions.length];
String[] docIDHashes = new String[documentDescriptions.length];
Long[] executeTimes = new Long[documentDescriptions.length];
int[] actions = new int[documentDescriptions.length];
long[] failTimes = new long[documentDescriptions.length];
int[] failCounts = new int[documentDescriptions.length];
// First loop maps document identifier back to an index.
HashMap indexMap = new HashMap();
int i = 0;
while (i < documentDescriptions.length)
{
docIDHashes[i] = documentDescriptions[i].getDocumentIdentifierHash() + ":" + documentDescriptions[i].getJobID();
indexMap.put(docIDHashes[i],new Integer(i));
i++;
}
// Sort!
java.util.Arrays.sort(docIDHashes);
// Next loop populates the actual arrays we use to feed the operation so that the ordering is correct.
i = 0;
while (i < docIDHashes.length)
{
String docIDHash = docIDHashes[i];
Integer x = (Integer)indexMap.remove(docIDHash);
if (x == null)
throw new ManifoldCFException("Assertion failure: duplicate document identifier jobid/hash detected!");
int index = x.intValue();
ids[i] = documentDescriptions[index].getID();
executeTimes[i] = executeTimeLong;
actions[i] = action;
long oldFailTime = documentDescriptions[index].getFailTime();
if (oldFailTime == -1L)
oldFailTime = failTime;
failTimes[i] = oldFailTime;
int oldFailCount = documentDescriptions[index].getFailRetryCount();
if (oldFailCount == -1)
oldFailCount = failCount;
else
{
oldFailCount--;
if (failCount != -1 && oldFailCount > failCount)
oldFailCount = failCount;
}
failCounts[i] = oldFailCount;
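// A document without a prior count gets the caller-supplied retry budget; otherwise each reset consumes
// one retry, with the remainder also capped at the supplied failCount.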
i++;
}
// Documents get marked PENDINGPURGATORY regardless of their current state; this is because we can't know at this point whether
// an ingestion attempt occurred or not, so we have to treat the documents as having been processed at least once.
while (true)
{
long sleepAmt = 0L;
database.beginTransaction();
try
{
// Going through ids in order should greatly reduce or eliminate chances of deadlock occurring. We thus need to pay attention to the sorted order.
i = 0;
while (i < ids.length)
{
jobQueue.setStatus(ids[i],jobQueue.STATUS_PENDINGPURGATORY,executeTimes[i],actions[i],(failTimes==null)?-1L:failTimes[i],(failCounts==null)?-1:failCounts[i]);
i++;
}
database.performCommit();
break;
}
catch (Error e)
{
database.signalRollback();
throw e;
}
catch (ManifoldCFException e)
{
database.signalRollback();
if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT)
{
if (Logging.perf.isDebugEnabled())
Logging.perf.debug("Aborted transaction resetting documents: "+e.getMessage());
sleepAmt = getRandomAmount();
continue;
}
throw e;
}
finally
{
database.endTransaction();
sleepFor(sleepAmt);
}
}
}
/** Reset a set of cleaning documents for further processing in the future.
* This method is called after some unknown number of the documents were cleaned, but then an ingestion service interruption occurred.
* Note well: The logic here basically presumes that we cannot know whether the documents were indeed cleaned or not.
* If we knew for a fact that none of the documents had been handled, it would be possible to look at the document's
* current status and decide what the new status ought to be, based on a true rollback scenario. Such cases, however, are rare enough so that
* special logic is probably not worth it.
*@param documentDescriptions is the set of description objects for the documents that were cleaned.
*@param checkTime is the minimum time for the next cleaning attempt.
*/
public void resetCleaningDocumentMultiple(DocumentDescription[] documentDescriptions, long checkTime)
throws ManifoldCFException
{
Long[] ids = new Long[documentDescriptions.length];
String[] docIDHashes = new String[documentDescriptions.length];
// First loop maps document identifier back to an index.
HashMap indexMap = new HashMap();
int i = 0;
while (i < documentDescriptions.length)
{
docIDHashes[i] = documentDescriptions[i].getDocumentIdentifierHash() + ":" + documentDescriptions[i].getJobID();
indexMap.put(docIDHashes[i],new Integer(i));
i++;
}
// Sort!
java.util.Arrays.sort(docIDHashes);
// Next loop populates the actual arrays we use to feed the operation so that the ordering is correct.
i = 0;
while (i < docIDHashes.length)
{
String docIDHash = docIDHashes[i];
Integer x = (Integer)indexMap.remove(docIDHash);
if (x == null)
throw new ManifoldCFException("Assertion failure: duplicate document identifier jobid/hash detected!");
int index = x.intValue();
ids[i] = documentDescriptions[index].getID();
i++;
}
// Documents get marked PURGATORY regardless of their current state; this is because we can't know at this point what the actual prior state was.
while (true)
{
long sleepAmt = 0L;
database.beginTransaction();
try
{
// Going through ids in order should greatly reduce or eliminate chances of deadlock occurring. We thus need to pay attention to the sorted order.
i = 0;
while (i < ids.length)
{
jobQueue.setUncleaningStatus(ids[i],checkTime);
i++;
}
TrackerClass.notePrecommit();
database.performCommit();
TrackerClass.noteCommit();
break;
}
catch (ManifoldCFException e)
{
database.signalRollback();
TrackerClass.noteRollback();
if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT)
{
if (Logging.perf.isDebugEnabled())
Logging.perf.debug("Aborted transaction resetting cleaning documents: "+e.getMessage());
sleepAmt = getRandomAmount();
continue;
}
throw e;
}
catch (Error e)
{
database.signalRollback();
TrackerClass.noteRollback();
throw e;
}
finally
{
database.endTransaction();
sleepFor(sleepAmt);
}
}
}
/** Reset a cleaning document back to its former state.
* This gets done when a deleting thread sees a service interruption, etc., from the ingestion system.
*@param documentDescription is the description of the document that was cleaned.
*@param checkTime is the minimum time for the next cleaning attempt.
*/
public void resetCleaningDocument(DocumentDescription documentDescription, long checkTime)
throws ManifoldCFException
{
resetCleaningDocumentMultiple(new DocumentDescription[]{documentDescription},checkTime);
}
/** Reset a set of deleting documents for further processing in the future.
* This method is called after some unknown number of the documents were deleted, but then an ingestion service interruption occurred.
* Note well: The logic here basically presumes that we cannot know whether the documents were indeed processed or not.
* If we knew for a fact that none of the documents had been handled, it would be possible to look at the document's
* current status and decide what the new status ought to be, based on a true rollback scenario. Such cases, however, are rare enough so that
* special logic is probably not worth it.
*@param documentDescriptions is the set of description objects for the documents that were deleted.
*@param checkTime is the minimum time for the next deletion attempt.
*/
public void resetDeletingDocumentMultiple(DocumentDescription[] documentDescriptions, long checkTime)
throws ManifoldCFException
{
Long[] ids = new Long[documentDescriptions.length];
String[] docIDHashes = new String[documentDescriptions.length];
// First loop maps document identifier back to an index.
HashMap indexMap = new HashMap();
int i = 0;
while (i < documentDescriptions.length)
{
docIDHashes[i] = documentDescriptions[i].getDocumentIdentifierHash() + ":" + documentDescriptions[i].getJobID();
indexMap.put(docIDHashes[i],new Integer(i));
i++;
}
// Sort!
java.util.Arrays.sort(docIDHashes);
// Next loop populates the actual arrays we use to feed the operation so that the ordering is correct.
i = 0;
while (i < docIDHashes.length)
{
String docIDHash = docIDHashes[i];
Integer x = (Integer)indexMap.remove(docIDHash);
if (x == null)
throw new ManifoldCFException("Assertion failure: duplicate document identifier jobid/hash detected!");
int index = x.intValue();
ids[i] = documentDescriptions[index].getID();
i++;
}
// Documents get marked COMPLETED regardless of their current state; this is because we can't know at this point what the actual prior state was.
while (true)
{
long sleepAmt = 0L;
database.beginTransaction();
try
{
// Going through ids in order should greatly reduce or eliminate chances of deadlock occurring. We thus need to pay attention to the sorted order.
i = 0;
while (i < ids.length)
{
jobQueue.setUndeletingStatus(ids[i],checkTime);
i++;
}
TrackerClass.notePrecommit();
database.performCommit();
TrackerClass.noteCommit();
break;
}
catch (ManifoldCFException e)
{
database.signalRollback();
TrackerClass.noteRollback();
if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT)
{
if (Logging.perf.isDebugEnabled())
Logging.perf.debug("Aborted transaction resetting documents: "+e.getMessage());
sleepAmt = getRandomAmount();
continue;
}
throw e;
}
catch (Error e)
{
database.signalRollback();
TrackerClass.noteRollback();
throw e;
}
finally
{
database.endTransaction();
sleepFor(sleepAmt);
}
}
}
/** Reset a deleting document back to its former state.
* This gets done when a deleting thread sees a service interruption, etc., from the ingestion system.
*@param documentDescription is the description object for the document that was being deleted.
*@param checkTime is the minimum time for the next deletion attempt.
*/
public void resetDeletingDocument(DocumentDescription documentDescription, long checkTime)
throws ManifoldCFException
{
resetDeletingDocumentMultiple(new DocumentDescription[]{documentDescription},checkTime);
}
/** Reset an active document back to its former state.
* This gets done when there's a service interruption and the document cannot be processed yet.
* Note well: This method formerly presumed that a perfect rollback was possible, and that there was zero chance of any
* processing activity occurring before it got called. That assumption appears incorrect, however, so I've opted to now
* presume that processing has perhaps occurred. Perfect rollback is thus no longer possible.
*@param documentDescription is the description object for the document that was processed.
*@param executeTime is the time that the document should be rescanned.
*@param action is the action to take when the execute time arrives.
*@param failTime is the time beyond which a service interruption will be considered a hard failure.
*@param failCount is the number of retries beyond which a service interruption will be considered a hard failure.
*/
public void resetDocument(DocumentDescription documentDescription, long executeTime, int action, long failTime,
int failCount)
throws ManifoldCFException
{
resetDocumentMultiple(new DocumentDescription[]{documentDescription},executeTime,action,failTime,failCount);
}
/** Eliminate duplicates, and sort */
protected static String[] eliminateDuplicates(String[] docIDHashes)
{
HashMap map = new HashMap();
int i = 0;
while (i < docIDHashes.length)
{
String docIDHash = docIDHashes[i++];
map.put(docIDHash,docIDHash);
}
String[] rval = new String[map.size()];
i = 0;
Iterator iter = map.keySet().iterator();
while (iter.hasNext())
{
rval[i++] = (String)iter.next();
}
java.util.Arrays.sort(rval);
return rval;
}
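// Example for the method above (hypothetical hashes): eliminateDuplicates(new String[]{"b","a","b"})
// yields {"a","b"}; duplicates are collapsed by the hash map, and the survivors are sorted.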
/** Build a reorder map, describing how to convert an original index into a reordered index. */
protected static HashMap buildReorderMap(String[] originalIDHashes, String[] reorderedIDHashes)
{
HashMap reorderSet = new HashMap();
int i = 0;
while (i < reorderedIDHashes.length)
{
String reorderedIDHash = reorderedIDHashes[i];
Integer position = new Integer(i);
reorderSet.put(reorderedIDHash,position);
i++;
}
HashMap map = new HashMap();
int j = 0;
while (j < originalIDHashes.length)
{
String originalIDHash = originalIDHashes[j];
Integer position = (Integer)reorderSet.get(originalIDHash);
if (position != null)
{
map.put(new Integer(j),position);
// Remove, so that only one of each duplicate will have a place in the map
reorderSet.remove(originalIDHash);
}
j++;
}
return map;
}
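// Worked example for the two helpers above (hypothetical hashes): with
// originalIDHashes = {"b","a","b"} and reorderedIDHashes = eliminateDuplicates(originalIDHashes) = {"a","b"},
// the resulting map is {0 -> 1, 1 -> 0}. The duplicate at original index 2 gets no entry, because its
// reordered position was already claimed by (and removed for) original index 0.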
/** Add an initial set of documents to the queue.
* This method is called during job startup, when the queue is being loaded.
* A set of document references is passed to this method, which updates the status of the document
* in the specified job's queue, according to specific state rules.
*@param jobID is the job identifier.
*@param legalLinkTypes is the set of legal link types that this connector generates.
*@param docIDHashes are the local document identifier hashes.
*@param docIDs are the local document identifiers.
*@param overrideSchedule is true if any existing document schedule should be overridden.
*@param hopcountMethod is either accurate, nodelete, or neverdelete.
*@param currentTime is the current time in milliseconds since epoch.
*@param documentPriorities are the document priorities corresponding to the document identifiers.
*@param prereqEventNames are the events that must be completed before each document can be processed.
*@return true if the priority value(s) were used, false otherwise.
*/
public boolean[] addDocumentsInitial(Long jobID, String[] legalLinkTypes,
String[] docIDHashes, String[] docIDs, boolean overrideSchedule,
int hopcountMethod, long currentTime, double[] documentPriorities,
String[][] prereqEventNames)
throws ManifoldCFException
{
if (docIDHashes.length == 0)
return new boolean[0];
// The document identifiers need to be sorted in a consistent fashion to reduce deadlock, and have duplicates removed, before going ahead.
// But, the documentPriorities and the return booleans need to correspond to the initial array. So, after we come up with
// our internal order, we need to construct a map that takes an original index and maps it to the reduced, reordered index.
String[] reorderedDocIDHashes = eliminateDuplicates(docIDHashes);
HashMap reorderMap = buildReorderMap(docIDHashes,reorderedDocIDHashes);
double[] reorderedDocumentPriorities = new double[reorderedDocIDHashes.length];
String[][] reorderedDocumentPrerequisites = new String[reorderedDocIDHashes.length][];
String[] reorderedDocumentIdentifiers = new String[reorderedDocIDHashes.length];
boolean[] rval = new boolean[docIDHashes.length];
int i = 0;
while (i < docIDHashes.length)
{
Integer newPosition = (Integer)reorderMap.get(new Integer(i));
if (newPosition != null)
{
reorderedDocumentPriorities[newPosition.intValue()] = documentPriorities[i];
if (prereqEventNames != null)
reorderedDocumentPrerequisites[newPosition.intValue()] = prereqEventNames[i];
else
reorderedDocumentPrerequisites[newPosition.intValue()] = null;
reorderedDocumentIdentifiers[newPosition.intValue()] = docIDs[i];
}
rval[i] = false;
i++;
}
long startTime = 0L;
if (Logging.perf.isDebugEnabled())
{
startTime = System.currentTimeMillis();
Logging.perf.debug("Waiting to add "+Integer.toString(reorderedDocIDHashes.length)+" initial docs and hopcounts for job "+jobID.toString());
}
// Postgres gets all screwed up if we permit multiple threads into the hopcount code, unless serialized
// transactions are used. But serialized transactions may require a retry in order
// to resolve transaction conflicts.
while (true)
{
long sleepAmt = 0L;
database.beginTransaction(database.TRANSACTION_SERIALIZED);
try
{
if (Logging.perf.isDebugEnabled())
Logging.perf.debug("Waited "+new Long(System.currentTimeMillis()-startTime).toString()+" ms to start adding "+Integer.toString(reorderedDocIDHashes.length)+
" initial docs and hopcounts for job "+jobID.toString());
// Go through document id's one at a time, in order - mainly to prevent deadlock as much as possible. Search for any existing row in jobqueue first (for update)
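// The statement assembled below has roughly this shape (illustrative names only; the real table and
// column names are resolved at runtime from jobQueue):
//   SELECT id,status,checktime FROM jobqueue WHERE dochash=? AND jobid=? FOR UPDATE
// The FOR UPDATE clause locks any matching row so the update-vs-insert decision below stays consistent.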
boolean[] reorderedRval = new boolean[reorderedDocIDHashes.length];
int z = 0;
while (z < reorderedDocIDHashes.length)
{
String docIDHash = reorderedDocIDHashes[z];
double docPriority = reorderedDocumentPriorities[z];
String docID = reorderedDocumentIdentifiers[z];
String[] docPrereqs = reorderedDocumentPrerequisites[z];
StringBuilder sb = new StringBuilder("SELECT ");
ArrayList list = new ArrayList();
sb.append(jobQueue.idField).append(",")
.append(jobQueue.statusField).append(",")
.append(jobQueue.checkTimeField)
.append(" FROM ").append(jobQueue.getTableName()).append(" WHERE ")
.append(database.buildConjunctionClause(list,new ClauseDescription[]{
new UnitaryClause(jobQueue.docHashField,docIDHash),
new UnitaryClause(jobQueue.jobIDField,jobID)}));
sb.append(" FOR UPDATE");
IResultSet set = database.performQuery(sb.toString(),list,null,null);
boolean priorityUsed;
long executeTime = overrideSchedule?0L:-1L;
if (set.getRowCount() > 0)
{
// Found a row, and it is now locked.
IResultRow row = set.getRow(0);
// Decode the row
Long rowID = (Long)row.getValue(jobQueue.idField);
int status = jobQueue.stringToStatus((String)row.getValue(jobQueue.statusField));
Long checkTimeValue = (Long)row.getValue(jobQueue.checkTimeField);
priorityUsed = jobQueue.updateExistingRecordInitial(rowID,status,checkTimeValue,executeTime,currentTime,docPriority,docPrereqs);
}
else
{
// Not found. Attempt an insert instead. This may fail due to constraints, but if this happens, the whole transaction will be retried.
jobQueue.insertNewRecordInitial(jobID,docIDHash,docID,docPriority,executeTime,currentTime,docPrereqs);
priorityUsed = true;
}
reorderedRval[z++] = priorityUsed;
}
if (Logging.perf.isDebugEnabled())
Logging.perf.debug("Took "+new Long(System.currentTimeMillis()-startTime).toString()+" ms to add "+Integer.toString(reorderedDocIDHashes.length)+
" initial docs for job "+jobID.toString());
if (legalLinkTypes.length > 0)
hopCount.recordSeedReferences(jobID,legalLinkTypes,reorderedDocIDHashes,hopcountMethod);
TrackerClass.notePrecommit();
database.performCommit();
TrackerClass.noteCommit();
if (Logging.perf.isDebugEnabled())
Logging.perf.debug("Took "+new Long(System.currentTimeMillis()-startTime).toString()+" ms to add "+Integer.toString(reorderedDocIDHashes.length)+
" initial docs and hopcounts for job "+jobID.toString());
// Rejigger to correspond with calling order
i = 0;
while (i < docIDs.length)
{
Integer finalPosition = (Integer)reorderMap.get(new Integer(i));
if (finalPosition != null)
rval[i] = reorderedRval[finalPosition.intValue()];
i++;
}
return rval;
}
catch (ManifoldCFException e)
{
database.signalRollback();
TrackerClass.noteRollback();
if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT)
{
if (Logging.perf.isDebugEnabled())
Logging.perf.debug("Aborted transaction adding "+Integer.toString(reorderedDocIDHashes.length)+
" initial docs for job "+jobID.toString()+": "+e.getMessage());
sleepAmt = getRandomAmount();
continue;
}
throw e;
}
catch (Error e)
{
database.signalRollback();
TrackerClass.noteRollback();
throw e;
}
finally
{
database.endTransaction();
sleepFor(sleepAmt);
}
}
}
/** Add an initial set of remaining documents to the queue.
* This method is called during job startup, when the queue is being loaded, to list documents that
* were NOT included by calling addDocumentsInitial(). Documents listed here simply enable the
* framework to get rid of old, invalid seeds; they are not queued for processing.
*@param jobID is the job identifier.
*@param legalLinkTypes is the set of legal link types that this connector generates.
*@param docIDHashes are the local document identifier hashes.
*@param hopcountMethod is either accurate, nodelete, or neverdelete.
*/
public void addRemainingDocumentsInitial(Long jobID, String[] legalLinkTypes, String[] docIDHashes,
int hopcountMethod)
throws ManifoldCFException
{
if (docIDHashes.length == 0)
return;
String[] reorderedDocIDHashes = eliminateDuplicates(docIDHashes);
long startTime = 0L;
if (Logging.perf.isDebugEnabled())
{
startTime = System.currentTimeMillis();
Logging.perf.debug("Waiting to add "+Integer.toString(reorderedDocIDHashes.length)+" remaining docs and hopcounts for job "+jobID.toString());
}
// Postgres gets all screwed up if we permit multiple threads into the hopcount code, unless the transactions are serialized,
// because it allows one transaction to see the effects of another transaction before it's been committed.
while (true)
{
long sleepAmt = 0L;
database.beginTransaction(database.TRANSACTION_SERIALIZED);
try
{
if (Logging.perf.isDebugEnabled())
Logging.perf.debug("Waited "+new Long(System.currentTimeMillis()-startTime).toString()+" ms to start adding "+Integer.toString(reorderedDocIDHashes.length)+
" remaining docs and hopcounts for job "+jobID.toString());
jobQueue.addRemainingDocumentsInitial(jobID,reorderedDocIDHashes);
if (legalLinkTypes.length > 0)
hopCount.recordSeedReferences(jobID,legalLinkTypes,reorderedDocIDHashes,hopcountMethod);
database.performCommit();
if (Logging.perf.isDebugEnabled())
Logging.perf.debug("Took "+new Long(System.currentTimeMillis()-startTime).toString()+" ms to add "+Integer.toString(reorderedDocIDHashes.length)+
" remaining docs and hopcounts for job "+jobID.toString());
break;
}
catch (ManifoldCFException e)
{
database.signalRollback();
if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT)
{
if (Logging.perf.isDebugEnabled())
Logging.perf.debug("Aborted transaction adding "+Integer.toString(reorderedDocIDHashes.length)+
" remaining docs and hopcounts for job "+jobID.toString()+": "+e.getMessage());
sleepAmt = getRandomAmount();
continue;
}
throw e;
}
catch (Error e)
{
database.signalRollback();
throw e;
}
finally
{
database.endTransaction();
sleepFor(sleepAmt);
}
}
}
/** Signal that a seeding pass has been done.
* Call this method at the end of a seeding pass. It is used to perform the bookkeeping necessary to
* maintain the hopcount table.
*@param jobID is the job identifier.
*@param legalLinkTypes is the set of legal link types that this connector generates.
*@param isPartial is set if the seeds provided are only a partial list. Some connectors cannot
* supply a full list of seeds on every seeding iteration; this acknowledges that limitation.
*@param hopcountMethod describes how to handle deletions for hopcount purposes.
*/
public void doneDocumentsInitial(Long jobID, String[] legalLinkTypes, boolean isPartial,
int hopcountMethod)
throws ManifoldCFException
{
long startTime = 0L;
if (Logging.perf.isDebugEnabled())
{
startTime = System.currentTimeMillis();
Logging.perf.debug("Waiting to finish initial docs and hopcounts for job "+jobID.toString());
}
// Postgres gets all screwed up if we permit multiple threads into the hopcount code, unless serialized transactions are used,
// because it allows one transaction to see the effects of another transaction before it's been committed.
while (true)
{
long sleepAmt = 0L;
database.beginTransaction(database.TRANSACTION_SERIALIZED);
try
{
if (Logging.perf.isDebugEnabled())
Logging.perf.debug("Waited "+new Long(System.currentTimeMillis()-startTime).toString()+
" ms to start finishing initial docs and hopcounts for job "+jobID.toString());
jobQueue.doneDocumentsInitial(jobID,isPartial);
if (Logging.perf.isDebugEnabled())
Logging.perf.debug("Took "+new Long(System.currentTimeMillis()-startTime).toString()+
" ms to finish initial docs for job "+jobID.toString());
if (legalLinkTypes.length > 0)
hopCount.finishSeedReferences(jobID,legalLinkTypes,hopcountMethod);
database.performCommit();
if (Logging.perf.isDebugEnabled())
Logging.perf.debug("Took "+new Long(System.currentTimeMillis()-startTime).toString()+
" ms to finish initial docs and hopcounts for job "+jobID.toString());
break;
}
catch (ManifoldCFException e)
{
database.signalRollback();
if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT)
{
if (Logging.perf.isDebugEnabled())
Logging.perf.debug("Aborted transaction finishing initial docs and hopcounts for job "+jobID.toString()+": "+e.getMessage());
sleepAmt = getRandomAmount();
continue;
}
throw e;
}
catch (Error e)
{
database.signalRollback();
throw e;
}
finally
{
database.endTransaction();
sleepFor(sleepAmt);
}
}
}
/** Get the specified hop counts, with the limit as described.
*@param jobID is the job identifier.
*@param legalLinkTypes is the set of legal link types that this connector generates.
*@param docIDHashes are the hashes for the set of documents to find the hopcount for.
*@param linkType is the kind of link to find the hopcount for.
*@param limit is the maximum allowed hop count; documents at a greater (or unknown) distance are reported as outside the limit.
*@param hopcountMethod is the method for managing hopcounts that is in effect.
*@return an array of booleans corresponding to the documents requested. A true value is returned
* if the document is within the specified limit, false otherwise.
*/
public boolean[] findHopCounts(Long jobID, String[] legalLinkTypes, String[] docIDHashes, String linkType, int limit,
int hopcountMethod)
throws ManifoldCFException
{
if (docIDHashes.length == 0)
return new boolean[0];
if (legalLinkTypes.length == 0)
throw new ManifoldCFException("Nonsensical request; asking for hopcounts where none are kept");
// The idea is to delay queue processing as much as possible, because that avoids having to wait
// on locks and having to repeat our evaluations.
//
// Luckily, we can glean a lot of information from what's hanging around. Specifically, whatever value
// we find in the table is an upper bound on the true hop distance value. So, only if we have documents
// that are outside the limit does the queue need to be processed.
//
// It is therefore really helpful to write in an estimated value for any newly created record, if possible. Even if the
// estimate is possibly greater than the true value, a great deal of locking and queue processing will be
// avoided.
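// For example (hypothetical values): a stored distance of 3 against a limit of 5 is definitive,
// since the true distance can only be smaller than the stored upper bound. Only a stored value of
// -1 (unknown) or one greater than the limit forces queue processing.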
// The flow here is to:
// - grab the right hoplock
// - process the queue
// - if the queue is empty, get the hopcounts we wanted, otherwise release the lock and loop around
long startTime = 0L;
if (Logging.perf.isDebugEnabled())
{
startTime = System.currentTimeMillis();
Logging.perf.debug("Beginning work to get "+Integer.toString(docIDHashes.length)+" hopcounts for job "+jobID.toString());
}
// Make an answer array.
boolean[] rval = new boolean[docIDHashes.length];
// Make a hash of what we still need a definitive answer for.
HashMap badAnswers = new HashMap();
int i = 0;
while (i < rval.length)
{
String docIDHash = docIDHashes[i];
rval[i] = false;
badAnswers.put(docIDHash,new Integer(i));
i++;
}
int iterationCount = 0;
while (true)
{
// Ask only about documents we don't have a definitive answer for yet.
String[] askDocIDHashes = new String[badAnswers.size()];
i = 0;
Iterator iter = badAnswers.keySet().iterator();
while (iter.hasNext())
{
askDocIDHashes[i++] = (String)iter.next();
}
int[] distances = hopCount.findHopCounts(jobID,askDocIDHashes,linkType);
i = 0;
while (i < distances.length)
{
int distance = distances[i];
String docIDHash = askDocIDHashes[i];
if (distance != -1 && distance <= limit)
{
// Found a usable value
rval[((Integer)badAnswers.remove(docIDHash)).intValue()] = true;
}
i++;
}
if (Logging.perf.isDebugEnabled())
Logging.perf.debug("Iteration "+Integer.toString(iterationCount++)+": After initial check, "+Integer.toString(badAnswers.size())+
" hopcounts remain to be found for job "+jobID.toString()+", out of "+Integer.toString(docIDHashes.length)+
" ("+new Long(System.currentTimeMillis()-startTime).toString()+" ms)");
if (badAnswers.size() == 0)
return rval;
// It appears we need to process the queue. We need to enter the hoplock section
// to make sure only one player is updating values at a time. Then, before we exit, we get the
// remaining values.
askDocIDHashes = new String[badAnswers.size()];
i = 0;
iter = badAnswers.keySet().iterator();
while (iter.hasNext())
{
askDocIDHashes[i++] = (String)iter.next();
}
// Currently, only one thread can possibly process any of the queue at a given time. This is because the queue marks are not set to something
// other than the "in queue" value during processing. My instinct is that queue processing is likely to interfere with other queue processing,
// so I've taken the route of prohibiting more than one batch of queue processing at a time, for now.
String hopLockName = getHopLockName(jobID);
long sleepAmt = 0L;
lockManager.enterWriteLock(hopLockName);
try
{
database.beginTransaction(database.TRANSACTION_SERIALIZED);
try
{
if (Logging.perf.isDebugEnabled())
Logging.perf.debug("Processing queue for job "+jobID.toString()+" ("+new Long(System.currentTimeMillis()-startTime).toString()+" ms)");
// The internal queue processing only does 200 at a time. This is a compromise between maximum efficiency (bigger number)
// and the fact that database writes are effectively blocked for the duration (which argues for a smaller number).
boolean definitive = hopCount.processQueue(jobID,legalLinkTypes,hopcountMethod);
// If definitive answers were not found, we leave the lock and go back to check on the status of the questions we were
// interested in. If the answers are all OK then we are done; if not, we need to process more queue, and keep doing that
// until we really ARE done.
if (!definitive)
{
// Sleep a little bit so another thread can have a whack at things
sleepAmt = 100L;
database.performCommit();
continue;
}
// Definitive answers found; continue through.
distances = hopCount.findHopCounts(jobID,askDocIDHashes,linkType);
database.performCommit();
}
catch (ManifoldCFException e)
{
database.signalRollback();
if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT)
{
if (Logging.perf.isDebugEnabled())
Logging.perf.debug("Aborted transaction processing queue for job "+jobID.toString()+": "+e.getMessage());
sleepAmt = getRandomAmount();
continue;
}
throw e;
}
catch (Error e)
{
database.signalRollback();
throw e;
}
finally
{
database.endTransaction();
}
}
finally
{
lockManager.leaveWriteLock(hopLockName);
sleepFor(sleepAmt);
}
if (Logging.perf.isDebugEnabled())
Logging.perf.debug("Definitive answers found for "+Integer.toString(docIDHashes.length)+
" hopcounts for job "+jobID.toString()+" ("+new Long(System.currentTimeMillis()-startTime).toString()+" ms)");
// All answers are guaranteed to be accurate now.
i = 0;
while (i < distances.length)
{
int distance = distances[i];
String docIDHash = askDocIDHashes[i];
if (distance != -1 && distance <= limit)
{
// Found a usable value
rval[((Integer)badAnswers.remove(docIDHash)).intValue()] = true;
}
i++;
}
return rval;
}
}
/** Get all the current seeds.
* Returns the seed document identifiers for a job.
*@param jobID is the job identifier.
*@return the document identifiers that are currently considered to be seeds.
*/
public String[] getAllSeeds(Long jobID)
throws ManifoldCFException
{
return jobQueue.getAllSeeds(jobID);
}
/** Add documents to the queue in bulk.
* This method is called during document processing, when a set of document references are discovered.
* The document references are passed to this method, which updates the status of the document(s)
* in the specified job's queue, according to specific state rules.
*@param jobID is the job identifier.
*@param legalLinkTypes is the set of legal link types that this connector generates.
*@param docIDHashes are the local document identifier hashes.
*@param docIDs are the local document identifiers.
*@param parentIdentifierHash is the optional parent identifier hash of this document. Pass null if none.
* MUST be present in the case of carrydown information.
*@param relationshipType is the optional link type between this document and its parent. Pass null if there
* is no relationship with a parent.
*@param hopcountMethod is the desired method for managing hopcounts.
*@param dataNames are the names of the data to carry down to the child from this parent.
*@param dataValues are the values to carry down to the child from this parent, corresponding to dataNames above. If CharacterInput objects are passed in here,
* it is the caller's responsibility to clean these up.
*@param currentTime is the time in milliseconds since epoch that will be recorded for this operation.
*@param documentPriorities are the desired document priorities for the documents.
*@param prereqEventNames are the events that must be completed before a document can be queued.
*@return an array of boolean values indicating whether the passed-in priority value was used for each doc id (true if used).
*/
public boolean[] addDocuments(Long jobID, String[] legalLinkTypes,
String[] docIDHashes, String[] docIDs,
String parentIdentifierHash, String relationshipType,
int hopcountMethod, String[][] dataNames, Object[][][] dataValues,
long currentTime, double[] documentPriorities,
String[][] prereqEventNames)
throws ManifoldCFException
{
if (docIDs.length == 0)
return new boolean[0];
// Sort the id hashes and eliminate duplicates. This will help avoid deadlock conditions.
// However, we also need to keep the carrydown data in synch, so track that around as well, and merge if there are
// duplicate document identifiers.
HashMap nameMap = new HashMap();
int k = 0;
while (k < docIDHashes.length)
{
String docIDHash = docIDHashes[k];
// If there are duplicates, we need to merge them.
HashMap names = (HashMap)nameMap.get(docIDHash);
if (names == null)
{
names = new HashMap();
nameMap.put(docIDHash,names);
}
String[] nameList = dataNames[k];
Object[][] dataList = dataValues[k];
int z = 0;
while (z < nameList.length)
{
String name = nameList[z];
Object[] values = dataList[z];
HashMap valueMap = (HashMap)names.get(name);
if (valueMap == null)
{
valueMap = new HashMap();
names.put(name,valueMap);
}
int y = 0;
while (y < values.length)
{
// Calculate the value hash; that's the true key, and the one that cannot be duplicated.
String valueHash;
if (values[y] instanceof CharacterInput)
{
// It's a CharacterInput object.
valueHash = ((CharacterInput)values[y]).getHashValue();
}
else
{
// It better be a String.
valueHash = ManifoldCF.hash((String)values[y]);
}
valueMap.put(valueHash,values[y]);
y++;
}
z++;
}
k++;
}
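// At this point nameMap has the shape docIDHash -> dataName -> valueHash -> value. For example
// (hypothetical identifiers): if "doc1" appears twice in docIDHashes, once carrying data name "acls"
// with value "v1" and once with values "v1" and "v2", the merged entry is
// "doc1" -> "acls" -> { hash("v1") -> "v1", hash("v2") -> "v2" };
// keying on the value hash makes the merge idempotent across duplicate document references.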
String[] reorderedDocIDHashes = eliminateDuplicates(docIDHashes);
HashMap reorderMap = buildReorderMap(docIDHashes,reorderedDocIDHashes);
double[] reorderedDocumentPriorities = new double[reorderedDocIDHashes.length];
String[][] reorderedDocumentPrerequisites = new String[reorderedDocIDHashes.length][];
String[] reorderedDocumentIdentifiers = new String[reorderedDocIDHashes.length];
boolean[] rval = new boolean[docIDHashes.length];
int i = 0;
while (i < docIDHashes.length)
{
Integer newPosition = (Integer)reorderMap.get(new Integer(i));
if (newPosition != null)
{
reorderedDocumentPriorities[newPosition.intValue()] = documentPriorities[i];
if (prereqEventNames != null)
reorderedDocumentPrerequisites[newPosition.intValue()] = prereqEventNames[i];
else
reorderedDocumentPrerequisites[newPosition.intValue()] = null;
reorderedDocumentIdentifiers[newPosition.intValue()] = docIDs[i];
}
rval[i] = false;
i++;
}
dataNames = new String[reorderedDocIDHashes.length][];
String[][][] dataHashValues = new String[reorderedDocIDHashes.length][][];
dataValues = new Object[reorderedDocIDHashes.length][][];
k = 0;
while (k < reorderedDocIDHashes.length)
{
String docIDHash = reorderedDocIDHashes[k];
HashMap names = (HashMap)nameMap.get(docIDHash);
dataNames[k] = new String[names.size()];
dataHashValues[k] = new String[names.size()][];
dataValues[k] = new Object[names.size()][];
Iterator iter = names.keySet().iterator();
int z = 0;
while (iter.hasNext())
{
String dataName = (String)iter.next();
(dataNames[k])[z] = dataName;
HashMap values = (HashMap)names.get(dataName);
(dataHashValues[k])[z] = new String[values.size()];
(dataValues[k])[z] = new Object[values.size()];
Iterator iter2 = values.keySet().iterator();
int y = 0;
while (iter2.hasNext())
{
String dataValueHash = (String)iter2.next();
Object dataValue = values.get(dataValueHash);
((dataHashValues[k])[z])[y] = dataValueHash;
((dataValues[k])[z])[y] = dataValue;
y++;
}
z++;
}
k++;
}
long startTime = 0L;
if (Logging.perf.isDebugEnabled())
{
startTime = System.currentTimeMillis();
Logging.perf.debug("Waiting to add "+Integer.toString(reorderedDocIDHashes.length)+" docs and hopcounts for job "+jobID.toString()+" parent identifier "+parentIdentifierHash);
}
// Postgres gets all screwed up if we permit multiple threads into the hopcount code,
// and allows one transaction to see the effects of another transaction before it's been committed.
while (true)
{
long sleepAmt = 0L;
database.beginTransaction(database.TRANSACTION_SERIALIZED);
try
{
if (Logging.perf.isDebugEnabled())
Logging.perf.debug("Waited "+new Long(System.currentTimeMillis()-startTime).toString()+" ms to start adding "+Integer.toString(reorderedDocIDHashes.length)+
" docs and hopcounts for job "+jobID.toString()+" parent identifier hash "+parentIdentifierHash);
// Go through document id's one at a time, in order - mainly to prevent deadlock as much as possible. Search for any existing row in jobqueue first (for update)
HashMap existingRows = new HashMap();
for (int z = 0; z < reorderedDocIDHashes.length; z++)
{
String docIDHash = reorderedDocIDHashes[z];
StringBuilder sb = new StringBuilder("SELECT ");
ArrayList list = new ArrayList();
sb.append(jobQueue.idField).append(",")
.append(jobQueue.statusField).append(",")
.append(jobQueue.checkTimeField)
.append(" FROM ").append(jobQueue.getTableName()).append(" WHERE ")
.append(database.buildConjunctionClause(list,new ClauseDescription[]{
new UnitaryClause(jobQueue.docHashField,docIDHash),
new UnitaryClause(jobQueue.jobIDField,jobID)}));
sb.append(" FOR UPDATE");
IResultSet set = database.performQuery(sb.toString(),list,null,null);
if (set.getRowCount() > 0)
{
// Found a row, and it is now locked.
IResultRow row = set.getRow(0);
// Decode the row
Long rowID = (Long)row.getValue(jobQueue.idField);
int status = jobQueue.stringToStatus((String)row.getValue(jobQueue.statusField));
Long checkTimeValue = (Long)row.getValue(jobQueue.checkTimeField);
existingRows.put(docIDHash,new JobqueueRecord(rowID,status,checkTimeValue));
}
else
{
// Not found. Attempt an insert instead. This may fail due to constraints, but if this happens, the whole transaction will be retried.
jobQueue.insertNewRecord(jobID,docIDHash,reorderedDocumentIdentifiers[z],reorderedDocumentPriorities[z],0L,currentTime,reorderedDocumentPrerequisites[z]);
}
}
// Update all the carrydown data at once, for greatest efficiency.
boolean[] carrydownChangesSeen = carryDown.recordCarrydownDataMultiple(jobID,parentIdentifierHash,reorderedDocIDHashes,dataNames,dataHashValues,dataValues);
// Same with hopcount.
boolean[] hopcountChangesSeen = null;
if (parentIdentifierHash != null && relationshipType != null)
hopcountChangesSeen = hopCount.recordReferences(jobID,legalLinkTypes,parentIdentifierHash,reorderedDocIDHashes,relationshipType,hopcountMethod);
// Loop through the document id's again, and perform updates where needed
boolean[] reorderedRval = new boolean[reorderedDocIDHashes.length];
boolean reactivateRemovedHopcountRecords = false;
for (int z = 0; z < reorderedDocIDHashes.length; z++)
{
String docIDHash = reorderedDocIDHashes[z];
JobqueueRecord jr = (JobqueueRecord)existingRows.get(docIDHash);
if (jr == null)
// It was an insert
reorderedRval[z] = true;
else
{
// It was an existing row; do the update logic
// The hopcountChangesSeen array describes whether each reference is a new one. This
// helps us determine whether we're going to need to "flip" HOPCOUNTREMOVED documents
// to the PENDING state. If the new link ended in an existing record, THEN we need to flip them all!
reorderedRval[z] = jobQueue.updateExistingRecord(jr.getRecordID(),jr.getStatus(),jr.getCheckTimeValue(),
0L,currentTime,carrydownChangesSeen[z] || (hopcountChangesSeen!=null && hopcountChangesSeen[z]),
reorderedDocumentPriorities[z],reorderedDocumentPrerequisites[z]);
// Signal if we need to perform the flip
if (hopcountChangesSeen != null && hopcountChangesSeen[z])
reactivateRemovedHopcountRecords = true;
}
}
if (reactivateRemovedHopcountRecords)
jobQueue.reactivateHopcountRemovedRecords(jobID);
TrackerClass.notePrecommit();
database.performCommit();
TrackerClass.noteCommit();
if (Logging.perf.isDebugEnabled())
Logging.perf.debug("Took "+new Long(System.currentTimeMillis()-startTime).toString()+" ms to add "+Integer.toString(reorderedDocIDHashes.length)+
" docs and hopcounts for job "+jobID.toString()+" parent identifier hash "+parentIdentifierHash);
i = 0;
while (i < docIDHashes.length)
{
Integer finalPosition = (Integer)reorderMap.get(new Integer(i));
if (finalPosition != null)
rval[i] = reorderedRval[finalPosition.intValue()];
i++;
}
return rval;
}
catch (ManifoldCFException e)
{
database.signalRollback();
TrackerClass.noteRollback();
if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT)
{
sleepAmt = getRandomAmount();
if (Logging.perf.isDebugEnabled())
Logging.perf.debug("Aborted transaction adding "+Integer.toString(reorderedDocIDHashes.length)+
" docs and hopcounts for job "+jobID.toString()+" parent identifier hash "+parentIdentifierHash+": "+e.getMessage()+"; sleeping for "+new Long(sleepAmt).toString()+" ms",e);
continue;
}
throw e;
}
catch (RuntimeException e)
{
database.signalRollback();
TrackerClass.noteRollback();
throw e;
}
catch (Error e)
{
database.signalRollback();
TrackerClass.noteRollback();
throw e;
}
finally
{
database.endTransaction();
sleepFor(sleepAmt);
}
}
}
/** Add a document to the queue.
* This method is called during document processing, when a document reference is discovered.
* The document reference is passed to this method, which updates the status of the document
* in the specified job's queue, according to specific state rules.
*@param jobID is the job identifier.
*@param legalLinkTypes is the set of legal link types that this connector generates.
*@param docIDHash is the local document identifier hash value.
*@param parentIdentifierHash is the optional parent identifier hash of this document. Pass null if none.
* MUST be present in the case of carrydown information.
*@param relationshipType is the optional link type between this document and its parent. Pass null if there
* is no relationship with a parent.
*@param hopcountMethod is the desired method for managing hopcounts.
*@param dataNames are the names of the data to carry down to the child from this parent.
*@param dataValues are the values to carry down to the child from this parent, corresponding to dataNames above.
*@param currentTime is the time in milliseconds since epoch that will be recorded for this operation.
*@param priority is the desired document priority for the document.
*@param prereqEventNames are the events that must be completed before the document can be processed.
*@return true if the priority value was used, false otherwise.
*/
public boolean addDocument(Long jobID, String[] legalLinkTypes, String docIDHash, String docID,
String parentIdentifierHash, String relationshipType,
int hopcountMethod, String[] dataNames, Object[][] dataValues,
long currentTime, double priority, String[] prereqEventNames)
throws ManifoldCFException
{
return addDocuments(jobID,legalLinkTypes,
new String[]{docIDHash},new String[]{docID},
parentIdentifierHash,relationshipType,hopcountMethod,new String[][]{dataNames},
new Object[][][]{dataValues},currentTime,new double[]{priority},new String[][]{prereqEventNames})[0];
}
/** Complete adding child documents to the queue, for a set of documents.
* This method is called at the end of document processing, to help the hopcount tracking engine do its bookkeeping.
*@param jobID is the job identifier.
*@param legalLinkTypes is the set of legal link types that this connector generates.
*@param parentIdentifierHashes are the document identifier hashes for whom child link extraction just took place.
*@param hopcountMethod describes how to handle deletions for hopcount purposes.
*@return the set of documents for which carrydown data was changed by this operation. These documents are likely
* to be requeued as a result of the change.
*/
public DocumentDescription[] finishDocuments(Long jobID, String[] legalLinkTypes, String[] parentIdentifierHashes, int hopcountMethod)
throws ManifoldCFException
{
if (parentIdentifierHashes.length == 0)
return new DocumentDescription[0];
DocumentDescription[] rval;
if (legalLinkTypes.length == 0)
{
// Must at least end the carrydown transaction. By itself, this does not need a serialized transaction; however, occasional
// deadlock is possible when a document shares multiple parents, so do the whole retry drill
while (true)
{
long sleepAmt = 0L;
database.beginTransaction(database.TRANSACTION_SERIALIZED);
try
{
// A certain set of carrydown records are going to be deleted by the ensuing restoreRecords command. Calculate that set of records!
rval = calculateAffectedRestoreCarrydownChildren(jobID,parentIdentifierHashes);
carryDown.restoreRecords(jobID,parentIdentifierHashes);
database.performCommit();
break;
}
catch (ManifoldCFException e)
{
database.signalRollback();
if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT)
{
if (Logging.perf.isDebugEnabled())
Logging.perf.debug("Aborted transaction finishing "+
Integer.toString(parentIdentifierHashes.length)+" doc carrydown records for job "+jobID.toString()+": "+e.getMessage());
sleepAmt = getRandomAmount();
continue;
}
throw e;
}
catch (Error e)
{
database.signalRollback();
throw e;
}
finally
{
database.endTransaction();
sleepFor(sleepAmt);
}
}
}
else
{
long startTime = 0L;
if (Logging.perf.isDebugEnabled())
{
startTime = System.currentTimeMillis();
Logging.perf.debug("Waiting to finish "+Integer.toString(parentIdentifierHashes.length)+" doc hopcounts for job "+jobID.toString());
}
// Postgres gets all screwed up if we permit multiple threads into the hopcount code,
// and allows one transaction to see the effects of another transaction before it's been committed.
while (true)
{
long sleepAmt = 0L;
database.beginTransaction(database.TRANSACTION_SERIALIZED);
try
{
// A certain set of carrydown records are going to be deleted by the ensuing restoreRecords command. Calculate that set of records!
rval = calculateAffectedRestoreCarrydownChildren(jobID,parentIdentifierHashes);
carryDown.restoreRecords(jobID,parentIdentifierHashes);
if (Logging.perf.isDebugEnabled())
Logging.perf.debug("Waited "+new Long(System.currentTimeMillis()-startTime).toString()+" ms to start finishing "+
Integer.toString(parentIdentifierHashes.length)+" doc hopcounts for job "+jobID.toString());
hopCount.finishParents(jobID,legalLinkTypes,parentIdentifierHashes,hopcountMethod);
database.performCommit();
if (Logging.perf.isDebugEnabled())
Logging.perf.debug("Took "+new Long(System.currentTimeMillis()-startTime).toString()+" ms to finish "+
Integer.toString(parentIdentifierHashes.length)+" doc hopcounts for job "+jobID.toString());
break;
}
catch (ManifoldCFException e)
{
database.signalRollback();
if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT)
{
if (Logging.perf.isDebugEnabled())
Logging.perf.debug("Aborted transaction finishing "+
Integer.toString(parentIdentifierHashes.length)+" doc hopcounts for job "+jobID.toString()+": "+e.getMessage());
sleepAmt = getRandomAmount();
continue;
}
throw e;
}
catch (Error e)
{
database.signalRollback();
throw e;
}
finally
{
database.endTransaction();
sleepFor(sleepAmt);
}
}
}
return rval;
}
/** Helper method: Calculate the unique set of affected carrydown children resulting from a "restoreRecords" operation.
*/
protected DocumentDescription[] calculateAffectedRestoreCarrydownChildren(Long jobID, String[] parentIDHashes)
throws ManifoldCFException
{
// We are going to need to break up this query into a number of subqueries, each covering a subset of parent id hashes.
// The goal is to throw all the children into a hash, to make them unique at the end.
HashMap resultHash = new HashMap();
ArrayList list = new ArrayList();
int maxCount = database.getMaxOrClause();
int i = 0;
int z = 0;
while (i < parentIDHashes.length)
{
if (z == maxCount)
{
processParentHashSet(jobID,resultHash,list);
list.clear();
z = 0;
}
list.add(parentIDHashes[i]);
i++;
z++;
}
if (z > 0)
processParentHashSet(jobID,resultHash,list);
// Now, put together the result document list from the hash.
DocumentDescription[] rval = new DocumentDescription[resultHash.size()];
i = 0;
Iterator iter = resultHash.keySet().iterator();
while (iter.hasNext())
{
Long id = (Long)iter.next();
DocumentDescription dd = (DocumentDescription)resultHash.get(id);
rval[i++] = dd;
}
return rval;
}
/** Helper method: look up rows affected by a restoreRecords operation.
*/
protected void processParentHashSet(Long jobID, HashMap resultHash, ArrayList list)
throws ManifoldCFException
{
// The query here mirrors the carrydown.restoreRecords() delete query! However, it also fetches enough information to build a DocumentDescription
// object for return, and so a join is necessary against the jobqueue table.
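// Assembled, the query has roughly this shape (illustrative names only; the real table and column
// names come from carryDown and jobQueue at runtime):
//   SELECT t0.id,t0.dochash,t0.docid FROM carrydown t1, jobqueue t0
//    WHERE t1.jobid=? AND t1.parentidhash IN (...)
//      AND t0.dochash=t1.childidhash AND t0.jobid=t1.jobid AND t1.isnew=?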
StringBuilder sb = new StringBuilder("SELECT ");
ArrayList newlist = new ArrayList();
sb.append("t0.").append(jobQueue.idField).append(",")
.append("t0.").append(jobQueue.docHashField).append(",")
.append("t0.").append(jobQueue.docIDField)
.append(" FROM ").append(carryDown.getTableName()).append(" t1, ")
.append(jobQueue.getTableName()).append(" t0 WHERE ");
sb.append(database.buildConjunctionClause(newlist,new ClauseDescription[]{
new UnitaryClause("t1."+carryDown.jobIDField,jobID),
new MultiClause("t1."+carryDown.parentIDHashField,list)})).append(" AND ");
sb.append(database.buildConjunctionClause(newlist,new ClauseDescription[]{
new JoinClause("t0."+jobQueue.docHashField,"t1."+carryDown.childIDHashField),
new JoinClause("t0."+jobQueue.jobIDField,"t1."+carryDown.jobIDField)})).append(" AND ");
sb.append("t1.").append(carryDown.newField).append("=?");
newlist.add(carryDown.statusToString(carryDown.ISNEW_BASE));
/*
sb.append("t0.").append(jobQueue.idField).append(",")
.append("t0.").append(jobQueue.docHashField).append(",")
.append("t0.").append(jobQueue.docIDField)
.append(" FROM ").append(jobQueue.getTableName()).append(" t0 WHERE ")
.append(database.buildConjunctionClause(newlist,new ClauseDescription[]{
new UnitaryClause("t0."+jobQueue.jobIDField,jobID)})).append(" AND ");
sb.append("EXISTS(SELECT 'x' FROM ").append(carryDown.getTableName()).append(" t1 WHERE ")
.append(database.buildConjunctionClause(newlist,new ClauseDescription[]{
new JoinClause("t1."+carryDown.jobIDField,"t0."+jobQueue.jobIDField),
new MultiClause("t1."+carryDown.parentIDHashField,list),
new JoinClause("t1."+carryDown.childIDHashField,"t0."+jobQueue.docHashField)})).append(" AND ")
.append("t1.").append(carryDown.newField).append("=?")
.append(")");
newlist.add(carryDown.statusToString(carryDown.ISNEW_BASE));
*/
IResultSet set = database.performQuery(sb.toString(),newlist,null,null);
int i = 0;
while (i < set.getRowCount())
{
IResultRow row = set.getRow(i++);
Long id = (Long)row.getValue(jobQueue.idField);
String documentIdentifierHash = (String)row.getValue(jobQueue.docHashField);
String documentIdentifier = (String)row.getValue(jobQueue.docIDField);
resultHash.put(id,new DocumentDescription(id,jobID,documentIdentifierHash,documentIdentifier));
}
}
/** Begin an event sequence.
*@param eventName is the name of the event.
*@return true if the event could be created, or false if it's already there.
*/
public boolean beginEventSequence(String eventName)
throws ManifoldCFException
{
try
{
eventManager.createEvent(eventName);
return true;
}
catch (ManifoldCFException e)
{
if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT)
return false;
throw e;
}
}
/** Complete an event sequence.
*@param eventName is the name of the event.
*/
public void completeEventSequence(String eventName)
throws ManifoldCFException
{
eventManager.destroyEvent(eventName);
}
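// A typical (hypothetical) caller pairs beginEventSequence()/completeEventSequence() to form a simple
// cross-process critical section:
//
//   if (jobManager.beginEventSequence("myEvent"))
//   {
//     try
//     {
//       // ... work guarded by the event ...
//     }
//     finally
//     {
//       jobManager.completeEventSequence("myEvent");
//     }
//   }
//
// A false return means another party already owns the event, and the guarded work should be skipped.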
/** Requeue a document set because of carrydown changes.
* This method is called when carrydown data is modified for a set of documents. The documents must be requeued for immediate reprocessing, even to the
* extent that if one is *already* being processed, it will need to be done over again.
*@param documentDescriptions is the set of description objects for the documents that have had their parent carrydown information changed.
*@param currentTime is the time in milliseconds since epoch.
*@param docPriorities are the document priorities to assign to the documents, if needed.
*@return a flag for each document priority, true if it was used, false otherwise.
*/
public boolean[] carrydownChangeDocumentMultiple(DocumentDescription[] documentDescriptions, long currentTime, double[] docPriorities)
throws ManifoldCFException
{
if (documentDescriptions.length == 0)
return new boolean[0];
// Order the updates by document hash, to prevent deadlock as much as possible.
// This map contains the original index of the document id hash.
HashMap docHashMap = new HashMap();
String[] docIDHashes = new String[documentDescriptions.length];
int i = 0;
while (i < documentDescriptions.length)
{
docIDHashes[i] = documentDescriptions[i].getDocumentIdentifierHash() + ":" + documentDescriptions[i].getJobID();
docHashMap.put(docIDHashes[i],new Integer(i));
i++;
}
// Sort the hashes
java.util.Arrays.sort(docIDHashes);
boolean[] rval = new boolean[docIDHashes.length];
// Enter transaction and prepare to look up document states in dochash order
while (true)
{
long sleepAmt = 0L;
database.beginTransaction(database.TRANSACTION_SERIALIZED);
try
{
// This is the map that will contain the rows we found, keyed by docIDHash.
HashMap existingRows = new HashMap();
// Loop through hashes in order
int j = 0;
while (j < docIDHashes.length)
{
String docIDHash = docIDHashes[j];
// Get the index
int originalIndex = ((Integer)docHashMap.get(docIDHash)).intValue();
// Lookup document description
DocumentDescription dd = documentDescriptions[originalIndex];
// Do the query. We can base this on the id column since we have that.
StringBuilder sb = new StringBuilder("SELECT ");
ArrayList list = new ArrayList();
sb.append(jobQueue.idField).append(",")
.append(jobQueue.statusField).append(",")
.append(jobQueue.checkTimeField)
.append(" FROM ").append(jobQueue.getTableName()).append(" WHERE ")
.append(database.buildConjunctionClause(list,new ClauseDescription[]{
new UnitaryClause(jobQueue.idField,dd.getID())})).append(" FOR UPDATE");
IResultSet set = database.performQuery(sb.toString(),list,null,null);
// If the row is there, we use its current info to requeue it properly.
if (set.getRowCount() > 0)
{
// Found a row, and it is now locked.
IResultRow row = set.getRow(0);
// Decode the row
Long rowID = (Long)row.getValue(jobQueue.idField);
int status = jobQueue.stringToStatus((String)row.getValue(jobQueue.statusField));
Long checkTimeValue = (Long)row.getValue(jobQueue.checkTimeField);
existingRows.put(docIDHash,new JobqueueRecord(rowID,status,checkTimeValue));
}
j++;
}
// Ok, existingRows contains all the rows we want to try to update. Go through these and update.
j = 0;
while (j < docIDHashes.length)
{
String docIDHash = docIDHashes[j];
int originalIndex = ((Integer)docHashMap.get(docIDHash)).intValue();
JobqueueRecord jr = (JobqueueRecord)existingRows.get(docIDHash);
if (jr == null)
// It wasn't found, so the doc priority wasn't used.
rval[originalIndex] = false;
else
// It was an existing row; do the update logic; use the 'carrydown changes' flag = true all the time.
rval[originalIndex] = jobQueue.updateExistingRecord(jr.getRecordID(),jr.getStatus(),jr.getCheckTimeValue(),
0L,currentTime,true,docPriorities[originalIndex],null);
j++;
}
database.performCommit();
break;
}
catch (ManifoldCFException e)
{
database.signalRollback();
if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT)
{
if (Logging.perf.isDebugEnabled())
Logging.perf.debug("Aborted transaction handling "+Integer.toString(docIDHashes.length)+" carrydown changes: "+e.getMessage());
sleepAmt = getRandomAmount();
continue;
}
throw e;
}
catch (Error e)
{
database.signalRollback();
throw e;
}
finally
{
database.endTransaction();
sleepFor(sleepAmt);
}
}
return rval;
}
/** Requeue a document because of carrydown changes.
* This method is called when carrydown data is modified for a document. The document must be requeued for immediate reprocessing, even to the
* extent that if it is *already* being processed, it will need to be done over again.
*@param documentDescription is the description object for the document that has had its parent carrydown information changed.
*@param currentTime is the time in milliseconds since epoch.
*@param docPriority is the document priority to assign to the document, if needed.
*@return a flag for the document priority, true if it was used, false otherwise.
*/
public boolean carrydownChangeDocument(DocumentDescription documentDescription, long currentTime, double docPriority)
throws ManifoldCFException
{
return carrydownChangeDocumentMultiple(new DocumentDescription[]{documentDescription},currentTime,new double[]{docPriority})[0];
}
/** Get a random amount of time to sleep after a transaction abort.
*/
protected long getRandomAmount()
{
return database.getSleepAmt();
}
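/** Sleep for the given number of milliseconds, using the database handle's sleep facility.
*/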
protected void sleepFor(long amt)
throws ManifoldCFException
{
database.sleepFor(amt);
}
/** Retrieve specific parent data for a given document.
*@param jobID is the job identifier.
*@param docIDHash is the document identifier hash value.
*@param dataName is the kind of data to retrieve.
*@return the unique data values.
*/
public String[] retrieveParentData(Long jobID, String docIDHash, String dataName)
throws ManifoldCFException
{
return carryDown.getDataValues(jobID,docIDHash,dataName);
}
/** Retrieve specific parent data for a given document.
*@param jobID is the job identifier.
*@param docIDHash is the document identifier hash value.
*@param dataName is the kind of data to retrieve.
*@return the unique data values.
*/
public CharacterInput[] retrieveParentDataAsFiles(Long jobID, String docIDHash, String dataName)
throws ManifoldCFException
{
return carryDown.getDataValuesAsFiles(jobID,docIDHash,dataName);
}
// These methods support the job threads (which start jobs and end jobs)
// There is one thread that starts jobs. It simply looks for jobs which are ready to
// start, and changes their state accordingly.
// There is also a pool of threads that end jobs. These threads wait for a job that
// looks like it is done, and do completion processing if it is.
/** Start all jobs in need of starting.
* This method marks all the appropriate jobs as "in progress", which is all that should be
* needed to start them.
* It's also the case that the start event should be logged in the event log. In order to make it possible for
* the caller to do this logging, a set of job ID's will be returned containing the jobs that
* were started.
*@param currentTime is the current time in milliseconds since epoch.
*@param unwaitList is filled in with the set of job ID objects that were resumed.
*/
public void startJobs(long currentTime, ArrayList unwaitList)
throws ManifoldCFException
{
// This method should compare the lasttime field against the current time, for all
// "not active" jobs, and see if a job should be started.
//
// If a job is to be started, then the following occurs:
// (1) If the job is "full scan", then all COMPLETED jobqueue entries are converted to
// PURGATORY.
// (2) The job is labeled as "ACTIVE".
// (3) The starttime field is set.
// (4) The endtime field is nulled out.
//
// This method also assesses jobs that are ACTIVE or PAUSED to see if they should be
// converted to ACTIVEWAIT or PAUSEDWAIT. This would happen if the current time exceeded
// the value in the "windowend" field for the job.
//
// Finally, jobs in ACTIVEWAIT or PAUSEDWAIT are assessed to see if they should become
// ACTIVE or PAUSED. This will occur if we have entered a new window for the job.
// Note well: We can't combine locks across both our lock manager and the database unless we do it consistently. The
// consistent practice throughout CF is to do the external locks first, then the database locks. This particular method
// thus cannot use cached job description information, because it must take database locks first against the jobs table.
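// Sketch of the ordering rule: code needing both kinds of locks must call
// lockManager.enterWriteLock(...) BEFORE database.beginTransaction(), never the reverse.
// Because this method takes database locks against the jobs table first, it cannot afterwards
// acquire the external locks that the cached job descriptions would require.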
while (true)
{
long sleepAmt = 0L;
database.beginTransaction();
try
{
// First, query the appropriate fields of all jobs.
StringBuilder sb = new StringBuilder("SELECT ");
ArrayList list = new ArrayList();
sb.append(jobs.idField).append(",")
.append(jobs.lastTimeField).append(",")
.append(jobs.statusField).append(",")
.append(jobs.startMethodField).append(",")
.append(jobs.outputNameField).append(",")
.append(jobs.connectionNameField)
.append(" FROM ").append(jobs.getTableName()).append(" WHERE ")
.append(database.buildConjunctionClause(list,new ClauseDescription[]{
new MultiClause(jobs.statusField,new Object[]{
jobs.statusToString(jobs.STATUS_INACTIVE),
jobs.statusToString(jobs.STATUS_ACTIVEWAIT),
jobs.statusToString(jobs.STATUS_ACTIVEWAITSEEDING),
jobs.statusToString(jobs.STATUS_PAUSEDWAIT),
jobs.statusToString(jobs.STATUS_PAUSEDWAITSEEDING)})})).append(" AND ")
.append(jobs.startMethodField).append("!=? FOR UPDATE");
list.add(jobs.startMethodToString(IJobDescription.START_DISABLE));
IResultSet set = database.performQuery(sb.toString(),list,null,null);
// Next, we query for the schedule information. In order to do that, we amass a list of job identifiers that we want schedule info
// for.
Long[] jobIDSet = new Long[set.getRowCount()];
int i = 0;
while (i < set.getRowCount())
{
IResultRow row = set.getRow(i);
jobIDSet[i++] = (Long)row.getValue(jobs.idField);
}
ScheduleRecord[][] srSet = jobs.readScheduleRecords(jobIDSet);
i = 0;
while (i < set.getRowCount())
{
IResultRow row = set.getRow(i);
Long jobID = (Long)row.getValue(jobs.idField);
int startMethod = jobs.stringToStartMethod((String)row.getValue(jobs.startMethodField));
String outputName = (String)row.getValue(jobs.outputNameField);
String connectionName = (String)row.getValue(jobs.connectionNameField);
ScheduleRecord[] thisSchedule = srSet[i++];
// Run at specific times
// We need to start with the start time as given, plus one
long startInterval = ((Long)row.getValue(jobs.lastTimeField)).longValue() + 1;
if (Logging.jobs.isDebugEnabled())
Logging.jobs.debug("Checking if job "+jobID.toString()+" needs to be started; it was last checked at "+
new Long(startInterval).toString()+", and now it is "+new Long(currentTime).toString());
// Proceed to the current time, and find a match if there is one to be found.
// If not -> continue
      // We go through *all* the schedule records. Of those that match, the one with the
      // latest end time is the one we take.
Long matchTime = null;
Long duration = null;
boolean requestMinimum = false;
for (int l = 0; l < thisSchedule.length; l++)
{
long trialStartInterval = startInterval;
ScheduleRecord sr = thisSchedule[l];
Long thisDuration = sr.getDuration();
if (startMethod == IJobDescription.START_WINDOWINSIDE &&
thisDuration != null)
{
// Bump the start interval back before the beginning of the current interval.
// This will guarantee a start as long as there is time in the window.
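          // For example, with a two-hour window, a check at 01:30 also considers schedule
          // hits back to 23:30 the previous day; any hit whose two-hour window is still
          // open at the current time can match.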
long trialStart = currentTime - thisDuration.longValue();
if (trialStart < trialStartInterval)
trialStartInterval = trialStart;
}
Long thisMatchTime = checkTimeMatch(trialStartInterval,currentTime,
sr.getDayOfWeek(),
sr.getDayOfMonth(),
sr.getMonthOfYear(),
sr.getYear(),
sr.getHourOfDay(),
sr.getMinutesOfHour(),
sr.getTimezone(),
thisDuration);
if (thisMatchTime == null)
{
if (Logging.jobs.isDebugEnabled())
Logging.jobs.debug(" No time match found within interval "+new Long(trialStartInterval).toString()+
" to "+new Long(currentTime).toString());
continue;
}
if (Logging.jobs.isDebugEnabled())
Logging.jobs.debug(" Time match FOUND within interval "+new Long(trialStartInterval).toString()+
" to "+new Long(currentTime).toString());
if (matchTime == null || thisDuration == null ||
(duration != null && thisMatchTime.longValue() + thisDuration.longValue() >
matchTime.longValue() + duration.longValue()))
{
matchTime = thisMatchTime;
duration = thisDuration;
requestMinimum = sr.getRequestMinimum();
}
}
if (matchTime == null)
{
jobs.updateLastTime(jobID,currentTime);
continue;
}
int status = jobs.stringToStatus(row.getValue(jobs.statusField).toString());
// Calculate the end of the window
Long windowEnd = null;
if (duration != null)
{
windowEnd = new Long(matchTime.longValue()+duration.longValue());
}
if (Logging.jobs.isDebugEnabled())
{
Logging.jobs.debug("Job '"+jobID+"' is within run window at "+new Long(currentTime).toString()+" ms. (which starts at "+
matchTime.toString()+" ms."+((duration==null)?"":(" and goes for "+duration.toString()+" ms."))+")");
}
switch (status)
{
case Jobs.STATUS_INACTIVE:
// If job was formerly "inactive", do the full startup.
// Start this job! but with no end time.
// This does not get logged because the startup thread does the logging.
jobs.startJob(jobID,windowEnd,requestMinimum);
jobQueue.clearFailTimes(jobID);
if (Logging.jobs.isDebugEnabled())
{
Logging.jobs.debug("Signalled for job start for job "+jobID);
}
break;
case Jobs.STATUS_ACTIVEWAIT:
unwaitList.add(jobID);
jobs.unwaitJob(jobID,Jobs.STATUS_RESUMING,windowEnd);
jobQueue.clearFailTimes(jobID);
if (Logging.jobs.isDebugEnabled())
{
Logging.jobs.debug("Un-waited job "+jobID);
}
break;
case Jobs.STATUS_ACTIVEWAITSEEDING:
unwaitList.add(jobID);
jobs.unwaitJob(jobID,Jobs.STATUS_RESUMINGSEEDING,windowEnd);
jobQueue.clearFailTimes(jobID);
if (Logging.jobs.isDebugEnabled())
{
Logging.jobs.debug("Un-waited job "+jobID);
}
break;
case Jobs.STATUS_PAUSEDWAIT:
unwaitList.add(jobID);
jobs.unwaitJob(jobID,jobs.STATUS_PAUSED,windowEnd);
if (Logging.jobs.isDebugEnabled())
{
Logging.jobs.debug("Un-waited (but still paused) job "+jobID);
}
break;
case Jobs.STATUS_PAUSEDWAITSEEDING:
unwaitList.add(jobID);
jobs.unwaitJob(jobID,jobs.STATUS_PAUSEDSEEDING,windowEnd);
if (Logging.jobs.isDebugEnabled())
{
Logging.jobs.debug("Un-waited (but still paused) job "+jobID);
}
break;
case Jobs.STATUS_PAUSINGWAITING:
unwaitList.add(jobID);
jobs.unwaitJob(jobID,jobs.STATUS_PAUSING,windowEnd);
if (Logging.jobs.isDebugEnabled())
{
Logging.jobs.debug("Un-waited (but still paused) job "+jobID);
}
break;
case Jobs.STATUS_PAUSINGWAITINGSEEDING:
unwaitList.add(jobID);
jobs.unwaitJob(jobID,jobs.STATUS_PAUSINGSEEDING,windowEnd);
if (Logging.jobs.isDebugEnabled())
{
Logging.jobs.debug("Un-waited (but still paused) job "+jobID);
}
break;
default:
break;
}
}
database.performCommit();
return;
}
catch (ManifoldCFException e)
{
database.signalRollback();
if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT)
{
if (Logging.perf.isDebugEnabled())
Logging.perf.debug("Aborted transaction resetting for restart: "+e.getMessage());
sleepAmt = getRandomAmount();
continue;
}
throw e;
}
catch (Error e)
{
database.signalRollback();
throw e;
}
finally
{
database.endTransaction();
sleepFor(sleepAmt);
}
}
}
/** Put active or paused jobs in wait state, if they've exceeded their window.
*@param currentTime is the current time in milliseconds since epoch.
*@param waitList is filled in with the set of job ID's that were put into a wait state.
*/
public void waitJobs(long currentTime, ArrayList waitList)
throws ManifoldCFException
{
// This method assesses jobs that are ACTIVE or PAUSED to see if they should be
// converted to ACTIVEWAIT or PAUSEDWAIT. This would happen if the current time exceeded
// the value in the "windowend" field for the job.
//
database.beginTransaction();
try
{
// First, query the appropriate fields of all jobs.
StringBuilder sb = new StringBuilder("SELECT ");
ArrayList list = new ArrayList();
sb.append(jobs.idField).append(",")
.append(jobs.statusField)
.append(" FROM ").append(jobs.getTableName()).append(" WHERE ")
.append(database.buildConjunctionClause(list,new ClauseDescription[]{
new MultiClause(jobs.statusField,new Object[]{
jobs.statusToString(jobs.STATUS_ACTIVE),
jobs.statusToString(jobs.STATUS_ACTIVESEEDING),
jobs.statusToString(jobs.STATUS_ACTIVE_UNINSTALLED),
jobs.statusToString(jobs.STATUS_ACTIVESEEDING_UNINSTALLED),
jobs.statusToString(jobs.STATUS_ACTIVE_NOOUTPUT),
jobs.statusToString(jobs.STATUS_ACTIVESEEDING_NOOUTPUT),
jobs.statusToString(jobs.STATUS_ACTIVE_NEITHER),
jobs.statusToString(jobs.STATUS_ACTIVESEEDING_NEITHER),
jobs.statusToString(jobs.STATUS_PAUSED),
jobs.statusToString(jobs.STATUS_PAUSEDSEEDING)})})).append(" AND ")
.append(jobs.windowEndField).append("<? FOR UPDATE");
list.add(new Long(currentTime));
IResultSet set = database.performQuery(sb.toString(),list,null,null);
int i = 0;
while (i < set.getRowCount())
{
IResultRow row = set.getRow(i++);
Long jobID = (Long)row.getValue(jobs.idField);
waitList.add(jobID);
int status = jobs.stringToStatus(row.getValue(jobs.statusField).toString());
// Make the job wait.
switch (status)
{
case Jobs.STATUS_ACTIVE:
case Jobs.STATUS_ACTIVE_UNINSTALLED:
case Jobs.STATUS_ACTIVE_NOOUTPUT:
case Jobs.STATUS_ACTIVE_NEITHER:
jobs.waitJob(jobID,Jobs.STATUS_ACTIVEWAITING);
if (Logging.jobs.isDebugEnabled())
{
Logging.jobs.debug("Job "+jobID+" now in 'wait' state due to window end");
}
break;
case Jobs.STATUS_ACTIVESEEDING:
case Jobs.STATUS_ACTIVESEEDING_UNINSTALLED:
case Jobs.STATUS_ACTIVESEEDING_NOOUTPUT:
case Jobs.STATUS_ACTIVESEEDING_NEITHER:
jobs.waitJob(jobID,Jobs.STATUS_ACTIVEWAITINGSEEDING);
if (Logging.jobs.isDebugEnabled())
{
Logging.jobs.debug("Job "+jobID+" now in 'wait' state due to window end");
}
break;
case Jobs.STATUS_PAUSED:
jobs.waitJob(jobID,Jobs.STATUS_PAUSEDWAIT);
if (Logging.jobs.isDebugEnabled())
{
Logging.jobs.debug("Job "+jobID+" now in 'wait paused' state due to window end");
}
break;
case Jobs.STATUS_PAUSEDSEEDING:
jobs.waitJob(jobID,Jobs.STATUS_PAUSEDWAITSEEDING);
if (Logging.jobs.isDebugEnabled())
{
Logging.jobs.debug("Job "+jobID+" now in 'wait paused' state due to window end");
}
break;
case Jobs.STATUS_PAUSING:
jobs.waitJob(jobID,Jobs.STATUS_PAUSINGWAITING);
if (Logging.jobs.isDebugEnabled())
{
Logging.jobs.debug("Job "+jobID+" now in 'wait paused' state due to window end");
}
break;
case Jobs.STATUS_PAUSINGSEEDING:
jobs.waitJob(jobID,Jobs.STATUS_PAUSINGWAITINGSEEDING);
if (Logging.jobs.isDebugEnabled())
{
Logging.jobs.debug("Job "+jobID+" now in 'wait paused' state due to window end");
}
break;
default:
break;
}
}
}
catch (ManifoldCFException e)
{
database.signalRollback();
throw e;
}
catch (Error e)
{
database.signalRollback();
throw e;
}
finally
{
database.endTransaction();
}
}
/** Reset job schedule. This re-evaluates whether the job should be started now. This method would typically
* be called after a job's scheduling window has been changed.
*@param jobID is the job identifier.
*/
public void resetJobSchedule(Long jobID)
throws ManifoldCFException
{
// Note: This is problematic; the expected behavior is for the job to start if "we are within the window",
// but not to start if the transition to active status was long enough ago.
// Since there's no "right" way to do this, do nothing for now.
// This explicitly did NOT work - it caused the job to refire every time it was saved.
// jobs.updateLastTime(jobID,0L);
}
/** Check if the specified job parameters have a 'hit' within the specified interval.
*@param startTime is the start time.
*@param currentTimestamp is the end time.
*@param daysOfWeek is the enumerated days of the week, or null.
*@param daysOfMonth is the enumerated days of the month, or null.
*@param months is the enumerated months, or null.
*@param years is the enumerated years, or null.
*@param hours is the enumerated hours, or null.
  *@param minutes is the enumerated minutes, or null.
  *@param timezone is the time zone in which to evaluate the schedule, or null for the default.
  *@param duration is the length of the run window in milliseconds, or null if there is none.
  *@return null if there is NO hit within the interval; otherwise the actual time of the hit in milliseconds
  * from epoch is returned.
  */
protected static Long checkTimeMatch(long startTime, long currentTimestamp,
EnumeratedValues daysOfWeek,
EnumeratedValues daysOfMonth,
EnumeratedValues months,
EnumeratedValues years,
EnumeratedValues hours,
EnumeratedValues minutes,
String timezone,
Long duration)
{
// What we do here is start with the previous timestamp, and advance until we
// either encounter a match, or we exceed the current timestamp.
Calendar c;
if (timezone == null)
{
c = Calendar.getInstance();
}
else
{
c = Calendar.getInstance(TimeZone.getTimeZone(timezone));
}
// Get the current starting time
c.setTimeInMillis(startTime);
// If there's a duration value, we can't match unless we're within the window.
    // That means we find a match, and then we verify that the end time is greater than the current timestamp.
    // If not, we move on (by incrementing).
// The main loop works off of the calendar and these values.
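    // Illustrative walk-through (hypothetical schedule): with hours={2} and minutes={0},
    // starting from 01:37:15.250, the loop first rounds milliseconds and seconds away
    // (reaching 01:38:00.000), then advances minute by minute until 02:00:00.000, which
    // satisfies both constraints and is returned as the match time.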
while (c.getTimeInMillis() < currentTimestamp)
{
      // Round milliseconds up to the nearest second, unless at 0 already
int x = c.get(Calendar.MILLISECOND);
if (x != c.getMinimum(Calendar.MILLISECOND))
{
int amtToAdd = c.getLeastMaximum(Calendar.MILLISECOND)+1-x;
if (amtToAdd < 1)
amtToAdd = 1;
c.add(Calendar.MILLISECOND,amtToAdd);
continue;
}
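      // Likewise, round seconds up to the nearest minute, unless at 0 already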
x = c.get(Calendar.SECOND);
if (x != c.getMinimum(Calendar.SECOND))
{
int amtToAdd = c.getLeastMaximum(Calendar.SECOND)+1-x;
if (amtToAdd < 1)
amtToAdd = 1;
c.add(Calendar.SECOND,amtToAdd);
continue;
}
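      // Tracks whether some finer-grained field (minutes, hours, days, months) has been
      // explicitly constrained; once set, coarser unconstrained fields are left alone
      // rather than being forced to their minimum values.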
boolean startedToCareYet = false;
x = c.get(Calendar.MINUTE);
// If we care about minutes, round up, otherwise go to the 0 value
if (minutes == null)
{
if (x != c.getMinimum(Calendar.MINUTE))
{
int amtToAdd = c.getLeastMaximum(Calendar.MINUTE)+1-x;
if (amtToAdd < 1)
amtToAdd = 1;
c.add(Calendar.MINUTE,amtToAdd);
continue;
}
}
else
{
// See if it is a legit value.
if (!minutes.checkValue(x-c.getMinimum(Calendar.MINUTE)))
{
// Advance to next legit value
// We could be clever, but we just advance one
c.add(Calendar.MINUTE,1);
continue;
}
startedToCareYet = true;
}
// Hours
x = c.get(Calendar.HOUR_OF_DAY);
if (hours == null)
{
if (!startedToCareYet && x != c.getMinimum(Calendar.HOUR_OF_DAY))
{
int amtToAdd = c.getLeastMaximum(Calendar.HOUR_OF_DAY)+1-x;
if (amtToAdd < 1)
amtToAdd = 1;
c.add(Calendar.HOUR_OF_DAY,amtToAdd);
continue;
}
}
else
{
if (!hours.checkValue(x-c.getMinimum(Calendar.HOUR_OF_DAY)))
{
// next hour
c.add(Calendar.HOUR_OF_DAY,1);
continue;
}
startedToCareYet = true;
}
// Days of month and days of week are at the same level;
// these advance concurrently. However, if NEITHER is specified, and nothing
// earlier was, then we do the 1st of the month.
x = c.get(Calendar.DAY_OF_WEEK);
if (daysOfWeek != null)
{
if (!daysOfWeek.checkValue(x-c.getMinimum(Calendar.DAY_OF_WEEK)))
{
// next day
c.add(Calendar.DAY_OF_WEEK,1);
continue;
}
startedToCareYet = true;
}
x = c.get(Calendar.DAY_OF_MONTH);
if (daysOfMonth == null)
{
// If nothing is specified but the month or the year, do it on the 1st.
if (!startedToCareYet && x != c.getMinimum(Calendar.DAY_OF_MONTH))
{
// Move as rapidly as possible towards the first of the month. But in no case, increment
// less than one day.
int amtToAdd = c.getLeastMaximum(Calendar.DAY_OF_MONTH)+1-x;
if (amtToAdd < 1)
amtToAdd = 1;
c.add(Calendar.DAY_OF_MONTH,amtToAdd);
continue;
}
}
else
{
if (!daysOfMonth.checkValue(x-c.getMinimum(Calendar.DAY_OF_MONTH)))
{
// next day
c.add(Calendar.DAY_OF_MONTH,1);
continue;
}
startedToCareYet = true;
}
x = c.get(Calendar.MONTH);
if (months == null)
{
if (!startedToCareYet && x != c.getMinimum(Calendar.MONTH))
{
int amtToAdd = c.getLeastMaximum(Calendar.MONTH)+1-x;
if (amtToAdd < 1)
amtToAdd = 1;
c.add(Calendar.MONTH,amtToAdd);
continue;
}
}
else
{
if (!months.checkValue(x-c.getMinimum(Calendar.MONTH)))
{
c.add(Calendar.MONTH,1);
continue;
}
startedToCareYet = true;
}
x = c.get(Calendar.YEAR);
if (years != null)
{
if (!years.checkValue(x))
{
c.add(Calendar.YEAR,1);
continue;
}
startedToCareYet = true;
}
// Looks like a match.
// Last check is to be sure we are in the window, if any. If we are outside the window,
// must skip forward.
if (duration != null && c.getTimeInMillis() + duration.longValue() <= currentTimestamp)
{
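        // The match's window has already closed; nudge the calendar forward so the
        // rounding logic at the top of the loop advances to the next candidate.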
c.add(Calendar.MILLISECOND,c.getLeastMaximum(Calendar.MILLISECOND));
continue;
}
return new Long(c.getTimeInMillis());
}
return null;
}
/** Manually start a job. The specified job will be run REGARDLESS of the timed windows, and
* will not cease until complete. If the job is already running, this operation will assure that
* the job does not pause when its window ends. The job can be manually paused, or manually aborted.
*@param jobID is the ID of the job to start.
*/
public void manualStart(Long jobID)
throws ManifoldCFException
{
manualStart(jobID,false);
}
/** Manually start a job. The specified job will be run REGARDLESS of the timed windows, and
* will not cease until complete. If the job is already running, this operation will assure that
* the job does not pause when its window ends. The job can be manually paused, or manually aborted.
*@param jobID is the ID of the job to start.
*@param requestMinimum is true if a minimal job run is requested.
*/
public void manualStart(Long jobID, boolean requestMinimum)
throws ManifoldCFException
{
database.beginTransaction();
try
{
// First, query the appropriate fields of all jobs.
StringBuilder sb = new StringBuilder("SELECT ");
ArrayList list = new ArrayList();
sb.append(jobs.statusField)
.append(" FROM ").append(jobs.getTableName()).append(" WHERE ")
.append(database.buildConjunctionClause(list,new ClauseDescription[]{
new UnitaryClause(jobs.idField,jobID)}))
.append(" FOR UPDATE");
IResultSet set = database.performQuery(sb.toString(),list,null,null);
if (set.getRowCount() < 1)
throw new ManifoldCFException("No such job: "+jobID);
IResultRow row = set.getRow(0);
int status = jobs.stringToStatus(row.getValue(jobs.statusField).toString());
if (status != Jobs.STATUS_INACTIVE)
throw new ManifoldCFException("Job "+jobID+" is already running");
IJobDescription jobDescription = jobs.load(jobID,true);
if (Logging.jobs.isDebugEnabled())
{
Logging.jobs.debug("Manually starting job "+jobID);
}
// Start this job! but with no end time.
jobs.startJob(jobID,null,requestMinimum);
jobQueue.clearFailTimes(jobID);
if (Logging.jobs.isDebugEnabled())
{
Logging.jobs.debug("Manual job start signal for job "+jobID+" successfully sent");
}
}
catch (ManifoldCFException e)
{
database.signalRollback();
throw e;
}
catch (Error e)
{
database.signalRollback();
throw e;
}
finally
{
database.endTransaction();
}
}
/** Note job delete started.
*@param jobID is the job id.
*@param startTime is the job delete start time.
*/
public void noteJobDeleteStarted(Long jobID, long startTime)
throws ManifoldCFException
{
jobs.noteJobDeleteStarted(jobID,startTime);
if (Logging.jobs.isDebugEnabled())
Logging.jobs.debug("Job "+jobID+" delete is now started");
}
/** Note job started.
*@param jobID is the job id.
*@param startTime is the job start time.
*/
public void noteJobStarted(Long jobID, long startTime)
throws ManifoldCFException
{
jobs.noteJobStarted(jobID,startTime);
if (Logging.jobs.isDebugEnabled())
Logging.jobs.debug("Job "+jobID+" is now started");
}
/** Note job seeded.
*@param jobID is the job id.
*@param seedTime is the job seed time.
*/
public void noteJobSeeded(Long jobID, long seedTime)
throws ManifoldCFException
{
jobs.noteJobSeeded(jobID,seedTime);
if (Logging.jobs.isDebugEnabled())
Logging.jobs.debug("Job "+jobID+" has been successfully reseeded");
}
/** Prepare for a delete scan.
*@param jobID is the job id.
*/
public void prepareDeleteScan(Long jobID)
throws ManifoldCFException
{
// No special treatment needed for hopcount or carrydown, since these all get deleted at once
// at the end of the job delete process.
TrackerClass.notePrecommit();
jobQueue.prepareDeleteScan(jobID);
TrackerClass.noteCommit();
}
/** Prepare a job to be run.
* This method is called regardless of the details of the job; what differs is only the flags that are passed in.
* The code inside will determine the appropriate procedures.
* (This method replaces prepareFullScan() and prepareIncrementalScan(). )
*@param jobID is the job id.
*@param legalLinkTypes are the link types allowed for the job.
*@param hopcountMethod describes how to handle deletions for hopcount purposes.
*@param connectorModel is the model used by the connector for the job.
*@param continuousJob is true if the job is a continuous one.
*@param fromBeginningOfTime is true if the job is running starting from time 0.
*@param requestMinimum is true if the minimal amount of work is requested for the job run.
*/
public void prepareJobScan(Long jobID, String[] legalLinkTypes, int hopcountMethod,
int connectorModel, boolean continuousJob, boolean fromBeginningOfTime,
boolean requestMinimum)
throws ManifoldCFException
{
// (1) If the connector has MODEL_ADD_CHANGE_DELETE, then
// we let the connector run the show; there's no purge phase, and therefore the
// documents are left in a COMPLETED state if they don't show up in the list
// of seeds that require the attention of the connector. However, we do need to
// preload the queue with all the existing documents, if there was any change to the
// specification information (which will mean that fromBeginningOfTime is set).
//
// (2) If the connector has MODEL_ALL, then it's a full crawl no matter what, so
// we do a full scan initialization.
//
// (3) If the connector has some other model, we look at the start time. A start
// time of 0 implies a full scan, while any other start time implies an incremental
// scan.
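    //
    // Decision summary, as implemented below:
    // - MODEL_ADD_CHANGE_DELETE: queue all existing documents only when starting from
    //   the beginning of time; otherwise do nothing.
    // - MODEL_CHAINED_ADD_CHANGE_DELETE: queue all existing documents when starting from
    //   the beginning of time; otherwise prepare a partial scan.
    // - Minimal run (non-MODEL_ALL, not from the beginning of time): prepare a partial
    //   scan for the chained-add models; otherwise do nothing.
    // - Everything else: a full scan for non-continuous MODEL_ALL or from-beginning runs
    //   (excluding MODEL_PARTIAL), else an incremental scan.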
// Complete connector model is told everything, so no delete phase.
if (connectorModel == IRepositoryConnector.MODEL_ADD_CHANGE_DELETE)
{
if (fromBeginningOfTime)
queueAllExisting(jobID,legalLinkTypes);
return;
}
// If the connector model is complete via chaining, then we just need to make
// sure discovery works to queue the changes.
if (connectorModel == IRepositoryConnector.MODEL_CHAINED_ADD_CHANGE_DELETE)
{
if (fromBeginningOfTime)
queueAllExisting(jobID,legalLinkTypes);
else
jobQueue.preparePartialScan(jobID);
return;
}
    // Similarly, a minimal crawl attempts no delete phase unless the connector explicitly forbids it, or unless
// the job criteria have changed.
if (requestMinimum && connectorModel != IRepositoryConnector.MODEL_ALL && !fromBeginningOfTime)
{
// If it is a chained model, do the partial prep.
if (connectorModel == IRepositoryConnector.MODEL_CHAINED_ADD ||
connectorModel == IRepositoryConnector.MODEL_CHAINED_ADD_CHANGE)
jobQueue.preparePartialScan(jobID);
return;
}
if (!continuousJob && connectorModel != IRepositoryConnector.MODEL_PARTIAL &&
(connectorModel == IRepositoryConnector.MODEL_ALL || fromBeginningOfTime))
prepareFullScan(jobID,legalLinkTypes,hopcountMethod);
else
jobQueue.prepareIncrementalScan(jobID);
}
/** Queue all existing.
*@param jobID is the job id.
*@param legalLinkTypes are the link types allowed for the job.
*/
protected void queueAllExisting(Long jobID, String[] legalLinkTypes)
throws ManifoldCFException
{
while (true)
{
long sleepAmt = 0L;
database.beginTransaction();
try
{
if (legalLinkTypes.length > 0)
{
jobQueue.reactivateHopcountRemovedRecords(jobID);
}
jobQueue.queueAllExisting(jobID);
TrackerClass.notePrecommit();
database.performCommit();
TrackerClass.noteCommit();
break;
}
catch (ManifoldCFException e)
{
database.signalRollback();
TrackerClass.noteRollback();
if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT)
{
if (Logging.perf.isDebugEnabled())
Logging.perf.debug("Aborted transaction during queueAllExisting: "+e.getMessage());
sleepAmt = getRandomAmount();
continue;
}
throw e;
}
catch (Error e)
{
database.signalRollback();
TrackerClass.noteRollback();
throw e;
}
finally
{
database.endTransaction();
sleepFor(sleepAmt);
}
}
}
/** Prepare for a full scan.
*@param jobID is the job id.
*@param legalLinkTypes are the link types allowed for the job.
*@param hopcountMethod describes how to handle deletions for hopcount purposes.
*/
protected void prepareFullScan(Long jobID, String[] legalLinkTypes, int hopcountMethod)
throws ManifoldCFException
{
while (true)
{
long sleepAmt = 0L;
// Since we delete documents here, we need to manage the hopcount part of the world too.
database.beginTransaction(database.TRANSACTION_SERIALIZED);
try
{
// Delete the documents we have never fetched, including any hopcount records we've calculated.
if (legalLinkTypes.length > 0)
{
ArrayList list = new ArrayList();
String query = database.buildConjunctionClause(list,new ClauseDescription[]{
new MultiClause("t99."+jobQueue.statusField,new Object[]{
jobQueue.statusToString(jobQueue.STATUS_PENDING),
jobQueue.statusToString(jobQueue.STATUS_HOPCOUNTREMOVED)})});
hopCount.deleteMatchingDocuments(jobID,legalLinkTypes,jobQueue.getTableName()+" t99",
"t99."+jobQueue.docHashField,"t99."+jobQueue.jobIDField,
query,list,
hopcountMethod);
}
jobQueue.prepareFullScan(jobID);
TrackerClass.notePrecommit();
database.performCommit();
TrackerClass.noteCommit();
break;
}
catch (ManifoldCFException e)
{
database.signalRollback();
TrackerClass.noteRollback();
if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT)
{
if (Logging.perf.isDebugEnabled())
Logging.perf.debug("Aborted transaction preparing full scan: "+e.getMessage());
sleepAmt = getRandomAmount();
continue;
}
throw e;
}
catch (Error e)
{
database.signalRollback();
TrackerClass.noteRollback();
throw e;
}
finally
{
database.endTransaction();
sleepFor(sleepAmt);
}
}
}
/** Manually abort a running job. The job will be permanently stopped, and will not run again until
* automatically started based on schedule, or manually started.
*@param jobID is the job to abort.
*/
public void manualAbort(Long jobID)
throws ManifoldCFException
{
// Just whack status back to "INACTIVE". The active documents will continue to be processed until done,
// but that's fine. There will be no finishing stage, obviously.
if (Logging.jobs.isDebugEnabled())
{
Logging.jobs.debug("Manually aborting job "+jobID);
}
while (true)
{
long sleepAmt = 0L;
database.beginTransaction();
try
{
jobs.abortJob(jobID,null);
database.performCommit();
break;
}
catch (ManifoldCFException e)
{
database.signalRollback();
if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT)
{
if (Logging.perf.isDebugEnabled())
Logging.perf.debug("Aborted transaction aborting job: "+e.getMessage());
sleepAmt = getRandomAmount();
continue;
}
throw e;
}
catch (Error e)
{
database.signalRollback();
throw e;
}
finally
{
database.endTransaction();
sleepFor(sleepAmt);
}
}
if (Logging.jobs.isDebugEnabled())
{
Logging.jobs.debug("Job "+jobID+" abort signal successfully sent");
}
}
/** Manually restart a running job. The job will be stopped and restarted. Any schedule affinity will be lost,
* until the job finishes on its own.
*@param jobID is the job to abort.
*@param requestMinimum is true if a minimal job run is requested.
*/
public void manualAbortRestart(Long jobID, boolean requestMinimum)
throws ManifoldCFException
{
if (Logging.jobs.isDebugEnabled())
{
Logging.jobs.debug("Manually restarting job "+jobID);
}
while (true)
{
long sleepAmt = 0L;
database.beginTransaction();
try
{
jobs.abortRestartJob(jobID,requestMinimum);
database.performCommit();
break;
}
catch (ManifoldCFException e)
{
database.signalRollback();
if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT)
{
if (Logging.perf.isDebugEnabled())
Logging.perf.debug("Aborted transaction restarting job: "+e.getMessage());
sleepAmt = getRandomAmount();
continue;
}
throw e;
}
catch (Error e)
{
database.signalRollback();
throw e;
}
finally
{
database.endTransaction();
sleepFor(sleepAmt);
}
}
if (Logging.jobs.isDebugEnabled())
{
Logging.jobs.debug("Job "+jobID+" restart signal successfully sent");
}
}
/** Manually restart a running job. The job will be stopped and restarted. Any schedule affinity will be lost,
* until the job finishes on its own.
*@param jobID is the job to abort.
*/
public void manualAbortRestart(Long jobID)
throws ManifoldCFException
{
manualAbortRestart(jobID,false);
}
/** Abort a running job due to a fatal error condition.
*@param jobID is the job to abort.
*@param errorText is the error text.
*@return true if this is the first logged abort request for this job.
*/
public boolean errorAbort(Long jobID, String errorText)
throws ManifoldCFException
{
// Just whack status back to "INACTIVE". The active documents will continue to be processed until done,
// but that's fine. There will be no finishing stage, obviously.
if (Logging.jobs.isDebugEnabled())
{
Logging.jobs.debug("Aborting job "+jobID+" due to error '"+errorText+"'");
}
boolean rval;
while (true)
{
long sleepAmt = 0L;
database.beginTransaction();
try
{
rval = jobs.abortJob(jobID,errorText);
database.performCommit();
break;
}
catch (ManifoldCFException e)
{
database.signalRollback();
if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT)
{
if (Logging.perf.isDebugEnabled())
Logging.perf.debug("Aborted transaction aborting job: "+e.getMessage());
sleepAmt = getRandomAmount();
continue;
}
throw e;
}
catch (Error e)
{
database.signalRollback();
throw e;
}
finally
{
database.endTransaction();
sleepFor(sleepAmt);
}
}
if (rval && Logging.jobs.isDebugEnabled())
{
Logging.jobs.debug("Job "+jobID+" abort signal successfully sent");
}
return rval;
}
/** Pause a job.
*@param jobID is the job identifier to pause.
*/
public void pauseJob(Long jobID)
throws ManifoldCFException
{
if (Logging.jobs.isDebugEnabled())
{
Logging.jobs.debug("Manually pausing job "+jobID);
}
while (true)
{
long sleepAmt = 0L;
database.beginTransaction();
try
{
jobs.pauseJob(jobID);
database.performCommit();
break;
}
catch (ManifoldCFException e)
{
database.signalRollback();
if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT)
{
if (Logging.perf.isDebugEnabled())
Logging.perf.debug("Aborted transaction pausing job: "+e.getMessage());
sleepAmt = getRandomAmount();
continue;
}
throw e;
}
catch (Error e)
{
database.signalRollback();
throw e;
}
finally
{
database.endTransaction();
sleepFor(sleepAmt);
}
}
if (Logging.jobs.isDebugEnabled())
{
Logging.jobs.debug("Job "+jobID+" successfully paused");
}
}
/** Restart a paused job.
*@param jobID is the job identifier to restart.
*/
public void restartJob(Long jobID)
throws ManifoldCFException
{
if (Logging.jobs.isDebugEnabled())
{
Logging.jobs.debug("Manually restarting paused job "+jobID);
}
while (true)
{
long sleepAmt = 0L;
database.beginTransaction();
try
{
jobs.restartJob(jobID);
jobQueue.clearFailTimes(jobID);
database.performCommit();
break;
}
catch (ManifoldCFException e)
{
database.signalRollback();
if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT)
{
if (Logging.perf.isDebugEnabled())
Logging.perf.debug("Aborted transaction restarting pausing job: "+e.getMessage());
sleepAmt = getRandomAmount();
continue;
}
throw e;
}
catch (Error e)
{
database.signalRollback();
throw e;
}
finally
{
database.endTransaction();
sleepFor(sleepAmt);
}
}
if (Logging.jobs.isDebugEnabled())
{
Logging.jobs.debug("Job "+jobID+" successfully restarted");
}
}
  /** Get the list of jobs that are ready for seeding.
  *@param currentTime is the current time in milliseconds since epoch.
  *@return jobs that are active and are running in adaptive mode. These will be seeded
  * based on what the connector says should be added to the queue.
  */
public JobSeedingRecord[] getJobsReadyForSeeding(long currentTime)
throws ManifoldCFException
{
while (true)
{
long sleepAmt = 0L;
database.beginTransaction();
try
{
// Do the query
StringBuilder sb = new StringBuilder("SELECT ");
ArrayList list = new ArrayList();
sb.append(jobs.idField).append(",")
.append(jobs.lastCheckTimeField).append(",")
.append(jobs.reseedIntervalField)
.append(" FROM ").append(jobs.getTableName()).append(" WHERE ")
.append(database.buildConjunctionClause(list,new ClauseDescription[]{
new UnitaryClause(jobs.statusField,jobs.statusToString(jobs.STATUS_ACTIVE))})).append(" AND ")
.append(jobs.typeField).append("=? AND ")
.append("(").append(jobs.reseedTimeField).append(" IS NULL OR ").append(jobs.reseedTimeField).append("<=?)")
.append(" FOR UPDATE");
list.add(jobs.typeToString(jobs.TYPE_CONTINUOUS));
list.add(new Long(currentTime));
IResultSet set = database.performQuery(sb.toString(),list,null,null);
// Update them all
JobSeedingRecord[] rval = new JobSeedingRecord[set.getRowCount()];
int i = 0;
while (i < rval.length)
{
IResultRow row = set.getRow(i);
Long jobID = (Long)row.getValue(jobs.idField);
Long x = (Long)row.getValue(jobs.lastCheckTimeField);
long synchTime = 0;
if (x != null)
synchTime = x.longValue();
Long r = (Long)row.getValue(jobs.reseedIntervalField);
Long reseedTime;
if (r != null)
reseedTime = new Long(currentTime + r.longValue());
else
reseedTime = null;
// Mark status of job as "active/seeding". Special status is needed so that abort
// will not complete until seeding is completed.
jobs.writeStatus(jobID,jobs.STATUS_ACTIVESEEDING,reseedTime);
if (Logging.jobs.isDebugEnabled())
{
Logging.jobs.debug("Marked job "+jobID+" for seeding");
}
rval[i] = new JobSeedingRecord(jobID,synchTime);
i++;
}
database.performCommit();
return rval;
}
catch (ManifoldCFException e)
{
database.signalRollback();
if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT)
{
if (Logging.perf.isDebugEnabled())
Logging.perf.debug("Aborted getting jobs ready for seeding: "+e.getMessage());
sleepAmt = getRandomAmount();
continue;
}
throw e;
}
catch (Error e)
{
database.signalRollback();
throw e;
}
finally
{
database.endTransaction();
sleepFor(sleepAmt);
}
}
}
/** Get the list of jobs that are ready for deletion.
*@return jobs that were in the "readyfordelete" state.
*/
public JobDeleteRecord[] getJobsReadyForDelete()
throws ManifoldCFException
{
while (true)
{
long sleepAmt = 0L;
database.beginTransaction();
try
{
// Do the query
StringBuilder sb = new StringBuilder("SELECT ");
ArrayList list = new ArrayList();
sb.append(jobs.idField).append(" FROM ").append(jobs.getTableName()).append(" WHERE ")
.append(database.buildConjunctionClause(list,new ClauseDescription[]{
new UnitaryClause(jobs.statusField,jobs.statusToString(jobs.STATUS_READYFORDELETE))}))
.append(" FOR UPDATE");
IResultSet set = database.performQuery(sb.toString(),list,null,null);
// Update them all
JobDeleteRecord[] rval = new JobDeleteRecord[set.getRowCount()];
int i = 0;
while (i < rval.length)
{
IResultRow row = set.getRow(i);
Long jobID = (Long)row.getValue(jobs.idField);
// Mark status of job as "starting delete"
jobs.writeStatus(jobID,jobs.STATUS_DELETESTARTINGUP);
if (Logging.jobs.isDebugEnabled())
{
Logging.jobs.debug("Marked job "+jobID+" for delete startup");
}
rval[i] = new JobDeleteRecord(jobID);
i++;
}
database.performCommit();
return rval;
}
catch (ManifoldCFException e)
{
database.signalRollback();
if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT)
{
if (Logging.perf.isDebugEnabled())
Logging.perf.debug("Aborted getting jobs ready for startup: "+e.getMessage());
sleepAmt = getRandomAmount();
continue;
}
throw e;
}
catch (Error e)
{
database.signalRollback();
throw e;
}
finally
{
database.endTransaction();
sleepFor(sleepAmt);
}
}
}
/** Get the list of jobs that are ready for startup.
*@return jobs that were in the "readyforstartup" state. These will be marked as being in the "starting up" state.
*/
public JobStartRecord[] getJobsReadyForStartup()
throws ManifoldCFException
{
while (true)
{
long sleepAmt = 0L;
database.beginTransaction();
try
{
// Do the query
StringBuilder sb = new StringBuilder("SELECT ");
ArrayList list = new ArrayList();
sb.append(jobs.idField).append(",")
.append(jobs.lastCheckTimeField).append(",")
.append(jobs.statusField)
.append(" FROM ").append(jobs.getTableName()).append(" WHERE ")
.append(database.buildConjunctionClause(list,new ClauseDescription[]{
new MultiClause(jobs.statusField,new Object[]{
jobs.statusToString(jobs.STATUS_READYFORSTARTUP),
jobs.statusToString(jobs.STATUS_READYFORSTARTUPMINIMAL)})}))
.append(" FOR UPDATE");
IResultSet set = database.performQuery(sb.toString(),list,null,null);
// Update them all
JobStartRecord[] rval = new JobStartRecord[set.getRowCount()];
int i = 0;
while (i < rval.length)
{
IResultRow row = set.getRow(i);
Long jobID = (Long)row.getValue(jobs.idField);
Long x = (Long)row.getValue(jobs.lastCheckTimeField);
int status = jobs.stringToStatus((String)row.getValue(jobs.statusField));
boolean requestMinimum = (status == jobs.STATUS_READYFORSTARTUPMINIMAL);
long synchTime = 0;
if (x != null)
synchTime = x.longValue();
// Mark status of job as "starting"
jobs.writeStatus(jobID,requestMinimum?jobs.STATUS_STARTINGUPMINIMAL:jobs.STATUS_STARTINGUP);
if (Logging.jobs.isDebugEnabled())
{
Logging.jobs.debug("Marked job "+jobID+" for startup");
}
rval[i] = new JobStartRecord(jobID,synchTime,requestMinimum);
i++;
}
database.performCommit();
return rval;
}
catch (ManifoldCFException e)
{
database.signalRollback();
if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT)
{
if (Logging.perf.isDebugEnabled())
Logging.perf.debug("Aborted getting jobs ready for startup: "+e.getMessage());
sleepAmt = getRandomAmount();
continue;
}
throw e;
}
catch (Error e)
{
database.signalRollback();
throw e;
}
finally
{
database.endTransaction();
sleepFor(sleepAmt);
}
}
}
/** Inactivate a job, from the notification state.
*@param jobID is the ID of the job to inactivate.
*/
public void inactivateJob(Long jobID)
throws ManifoldCFException
{
    // While there is no flow that can cause a job to be in the wrong state when this gets called, as a precaution
    // the status is checked first, inside a transaction.
while (true)
{
long sleepAmt = 0L;
database.beginTransaction();
try
{
// Check job status
StringBuilder sb = new StringBuilder("SELECT ");
ArrayList list = new ArrayList();
sb.append(jobs.statusField).append(" FROM ").append(jobs.getTableName()).append(" WHERE ")
.append(database.buildConjunctionClause(list,new ClauseDescription[]{
new UnitaryClause(jobs.idField,jobID)}))
.append(" FOR UPDATE");
IResultSet set = database.performQuery(sb.toString(),list,null,null);
if (set.getRowCount() == 0)
throw new ManifoldCFException("No such job: "+jobID);
IResultRow row = set.getRow(0);
int status = jobs.stringToStatus((String)row.getValue(jobs.statusField));
switch (status)
{
case Jobs.STATUS_NOTIFYINGOFCOMPLETION:
jobs.notificationComplete(jobID);
break;
default:
throw new ManifoldCFException("Unexpected job status: "+Integer.toString(status));
}
database.performCommit();
return;
}
catch (ManifoldCFException e)
{
database.signalRollback();
if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT)
{
if (Logging.perf.isDebugEnabled())
Logging.perf.debug("Aborted clearing notification state for job: "+e.getMessage());
sleepAmt = getRandomAmount();
continue;
}
throw e;
}
catch (Error e)
{
database.signalRollback();
throw e;
}
finally
{
database.endTransaction();
sleepFor(sleepAmt);
}
}
}
/** Reset a job starting for delete back to "ready for delete"
* state.
*@param jobID is the job id.
*/
public void resetStartDeleteJob(Long jobID)
throws ManifoldCFException
{
while (true)
{
long sleepAmt = 0L;
database.beginTransaction();
try
{
// Check job status
StringBuilder sb = new StringBuilder("SELECT ");
ArrayList list = new ArrayList();
sb.append(jobs.statusField).append(" FROM ").append(jobs.getTableName()).append(" WHERE ")
.append(database.buildConjunctionClause(list,new ClauseDescription[]{
new UnitaryClause(jobs.idField,jobID)}))
.append(" FOR UPDATE");
IResultSet set = database.performQuery(sb.toString(),list,null,null);
if (set.getRowCount() == 0)
throw new ManifoldCFException("No such job: "+jobID);
IResultRow row = set.getRow(0);
int status = jobs.stringToStatus((String)row.getValue(jobs.statusField));
switch (status)
{
case Jobs.STATUS_DELETESTARTINGUP:
if (Logging.jobs.isDebugEnabled())
Logging.jobs.debug("Setting job "+jobID+" back to 'ReadyForDelete' state");
          // Set the state of the job back to "ReadyForDelete"
jobs.writeStatus(jobID,jobs.STATUS_READYFORDELETE);
break;
default:
throw new ManifoldCFException("Unexpected job status: "+Integer.toString(status));
}
database.performCommit();
return;
}
catch (ManifoldCFException e)
{
database.signalRollback();
if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT)
{
if (Logging.perf.isDebugEnabled())
Logging.perf.debug("Aborted resetting start delete job: "+e.getMessage());
sleepAmt = getRandomAmount();
continue;
}
throw e;
}
catch (Error e)
{
database.signalRollback();
throw e;
}
finally
{
database.endTransaction();
sleepFor(sleepAmt);
}
}
}
/** Reset a job that is notifying back to "ready for notify"
* state.
*@param jobID is the job id.
*/
public void resetNotifyJob(Long jobID)
throws ManifoldCFException
{
while (true)
{
long sleepAmt = 0L;
database.beginTransaction();
try
{
// Check job status
StringBuilder sb = new StringBuilder("SELECT ");
ArrayList list = new ArrayList();
sb.append(jobs.statusField).append(" FROM ").append(jobs.getTableName()).append(" WHERE ")
.append(database.buildConjunctionClause(list,new ClauseDescription[]{
new UnitaryClause(jobs.idField,jobID)}))
.append(" FOR UPDATE");
IResultSet set = database.performQuery(sb.toString(),list,null,null);
if (set.getRowCount() == 0)
throw new ManifoldCFException("No such job: "+jobID);
IResultRow row = set.getRow(0);
int status = jobs.stringToStatus((String)row.getValue(jobs.statusField));
switch (status)
{
case Jobs.STATUS_NOTIFYINGOFCOMPLETION:
if (Logging.jobs.isDebugEnabled())
Logging.jobs.debug("Setting job "+jobID+" back to 'ReadyForNotify' state");
// Set the state of the job back to "ReadyForNotify"
jobs.writeStatus(jobID,jobs.STATUS_READYFORNOTIFY);
break;
default:
throw new ManifoldCFException("Unexpected job status: "+Integer.toString(status));
}
database.performCommit();
return;
}
catch (ManifoldCFException e)
{
database.signalRollback();
if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT)
{
if (Logging.perf.isDebugEnabled())
Logging.perf.debug("Aborted resetting notify job: "+e.getMessage());
sleepAmt = getRandomAmount();
continue;
}
throw e;
}
catch (Error e)
{
database.signalRollback();
throw e;
}
finally
{
database.endTransaction();
sleepFor(sleepAmt);
}
}
}
/** Reset a starting job back to "ready for startup" state.
*@param jobID is the job id.
*/
public void resetStartupJob(Long jobID)
throws ManifoldCFException
{
while (true)
{
long sleepAmt = 0L;
database.beginTransaction();
try
{
// Check job status
StringBuilder sb = new StringBuilder("SELECT ");
ArrayList list = new ArrayList();
sb.append(jobs.statusField).append(" FROM ").append(jobs.getTableName()).append(" WHERE ")
.append(database.buildConjunctionClause(list,new ClauseDescription[]{
new UnitaryClause(jobs.idField,jobID)}))
.append(" FOR UPDATE");
IResultSet set = database.performQuery(sb.toString(),list,null,null);
if (set.getRowCount() == 0)
throw new ManifoldCFException("No such job: "+jobID);
IResultRow row = set.getRow(0);
int status = jobs.stringToStatus((String)row.getValue(jobs.statusField));
switch (status)
{
case Jobs.STATUS_STARTINGUP:
if (Logging.jobs.isDebugEnabled())
Logging.jobs.debug("Setting job "+jobID+" back to 'ReadyForStartup' state");
// Set the state of the job back to "ReadyForStartup"
jobs.writeStatus(jobID,jobs.STATUS_READYFORSTARTUP);
break;
case Jobs.STATUS_STARTINGUPMINIMAL:
if (Logging.jobs.isDebugEnabled())
Logging.jobs.debug("Setting job "+jobID+" back to 'ReadyForStartupMinimal' state");
// Set the state of the job back to "ReadyForStartupMinimal"
jobs.writeStatus(jobID,jobs.STATUS_READYFORSTARTUPMINIMAL);
break;
case Jobs.STATUS_ABORTINGSTARTINGUP:
case Jobs.STATUS_ABORTINGSTARTINGUPMINIMAL:
if (Logging.jobs.isDebugEnabled())
Logging.jobs.debug("Setting job "+jobID+" to 'Aborting' state");
jobs.writeStatus(jobID,jobs.STATUS_ABORTING);
break;
case Jobs.STATUS_ABORTINGSTARTINGUPFORRESTART:
if (Logging.jobs.isDebugEnabled())
Logging.jobs.debug("Setting job "+jobID+" to 'AbortingForRestart' state");
jobs.writeStatus(jobID,jobs.STATUS_ABORTINGFORRESTART);
break;
case Jobs.STATUS_ABORTINGSTARTINGUPFORRESTARTMINIMAL:
if (Logging.jobs.isDebugEnabled())
Logging.jobs.debug("Setting job "+jobID+" to 'AbortingForRestartMinimal' state");
jobs.writeStatus(jobID,jobs.STATUS_ABORTINGFORRESTARTMINIMAL);
break;
case Jobs.STATUS_READYFORSTARTUP:
case Jobs.STATUS_READYFORSTARTUPMINIMAL:
case Jobs.STATUS_ABORTING:
case Jobs.STATUS_ABORTINGFORRESTART:
case Jobs.STATUS_ABORTINGFORRESTARTMINIMAL:
// ok
break;
default:
throw new ManifoldCFException("Unexpected job status: "+Integer.toString(status));
}
database.performCommit();
return;
}
catch (ManifoldCFException e)
{
database.signalRollback();
if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT)
{
if (Logging.perf.isDebugEnabled())
Logging.perf.debug("Aborted resetting startup job: "+e.getMessage());
sleepAmt = getRandomAmount();
continue;
}
throw e;
}
catch (Error e)
{
database.signalRollback();
throw e;
}
finally
{
database.endTransaction();
sleepFor(sleepAmt);
}
}
}
/** Reset a seeding job back to "active" state.
*@param jobID is the job id.
*/
public void resetSeedJob(Long jobID)
throws ManifoldCFException
{
while (true)
{
long sleepAmt = 0L;
database.beginTransaction();
try
{
// Check job status
StringBuilder sb = new StringBuilder("SELECT ");
ArrayList list = new ArrayList();
sb.append(jobs.statusField).append(" FROM ").append(jobs.getTableName()).append(" WHERE ")
.append(database.buildConjunctionClause(list,new ClauseDescription[]{
new UnitaryClause(jobs.idField,jobID)}))
.append(" FOR UPDATE");
IResultSet set = database.performQuery(sb.toString(),list,null,null);
if (set.getRowCount() == 0)
throw new ManifoldCFException("No such job: "+jobID);
IResultRow row = set.getRow(0);
int status = jobs.stringToStatus((String)row.getValue(jobs.statusField));
switch (status)
{
case Jobs.STATUS_ACTIVESEEDING_UNINSTALLED:
if (Logging.jobs.isDebugEnabled())
Logging.jobs.debug("Setting job "+jobID+" back to 'Active_Uninstalled' state");
          // Set the state of the job back to "Active_Uninstalled"
jobs.writeStatus(jobID,jobs.STATUS_ACTIVE_UNINSTALLED);
break;
case Jobs.STATUS_ACTIVESEEDING_NOOUTPUT:
if (Logging.jobs.isDebugEnabled())
Logging.jobs.debug("Setting job "+jobID+" back to 'Active_NoOutput' state");
          // Set the state of the job back to "Active_NoOutput"
jobs.writeStatus(jobID,jobs.STATUS_ACTIVE_NOOUTPUT);
break;
case Jobs.STATUS_ACTIVESEEDING_NEITHER:
if (Logging.jobs.isDebugEnabled())
Logging.jobs.debug("Setting job "+jobID+" back to 'Active_Neither' state");
          // Set the state of the job back to "Active_Neither"
jobs.writeStatus(jobID,jobs.STATUS_ACTIVE_NEITHER);
break;
case Jobs.STATUS_ACTIVESEEDING:
if (Logging.jobs.isDebugEnabled())
Logging.jobs.debug("Setting job "+jobID+" back to 'Active' state");
// Set the state of the job back to "Active"
jobs.writeStatus(jobID,jobs.STATUS_ACTIVE);
break;
case Jobs.STATUS_ACTIVEWAITSEEDING:
if (Logging.jobs.isDebugEnabled())
Logging.jobs.debug("Setting job "+jobID+" back to 'ActiveWait' state");
          // Set the state of the job back to "ActiveWait"
jobs.writeStatus(jobID,jobs.STATUS_ACTIVEWAIT);
break;
case Jobs.STATUS_PAUSEDSEEDING:
if (Logging.jobs.isDebugEnabled())
Logging.jobs.debug("Setting job "+jobID+" back to 'Paused' state");
          // Set the state of the job back to "Paused"
jobs.writeStatus(jobID,jobs.STATUS_PAUSED);
break;
case Jobs.STATUS_PAUSEDWAITSEEDING:
if (Logging.jobs.isDebugEnabled())
Logging.jobs.debug("Setting job "+jobID+" back to 'PausedWait' state");
          // Set the state of the job back to "PausedWait"
jobs.writeStatus(jobID,jobs.STATUS_PAUSEDWAIT);
break;
case Jobs.STATUS_ABORTINGSEEDING:
if (Logging.jobs.isDebugEnabled())
Logging.jobs.debug("Setting job "+jobID+" back to 'Aborting' state");
          // Set the state of the job back to "Aborting"
jobs.writeStatus(jobID,jobs.STATUS_ABORTING);
break;
case Jobs.STATUS_ABORTINGFORRESTARTSEEDING:
if (Logging.jobs.isDebugEnabled())
Logging.jobs.debug("Setting job "+jobID+" back to 'AbortingForRestart' state");
          // Set the state of the job back to "AbortingForRestart"
jobs.writeStatus(jobID,jobs.STATUS_ABORTINGFORRESTART);
break;
case Jobs.STATUS_ABORTINGFORRESTARTSEEDINGMINIMAL:
if (Logging.jobs.isDebugEnabled())
Logging.jobs.debug("Setting job "+jobID+" back to 'AbortingForRestartMinimal' state");
          // Set the state of the job back to "AbortingForRestartMinimal"
jobs.writeStatus(jobID,jobs.STATUS_ABORTINGFORRESTARTMINIMAL);
break;
case Jobs.STATUS_ABORTING:
case Jobs.STATUS_ABORTINGFORRESTART:
case Jobs.STATUS_ABORTINGFORRESTARTMINIMAL:
case Jobs.STATUS_ACTIVE:
case Jobs.STATUS_ACTIVE_UNINSTALLED:
case Jobs.STATUS_ACTIVE_NOOUTPUT:
case Jobs.STATUS_ACTIVE_NEITHER:
case Jobs.STATUS_PAUSED:
case Jobs.STATUS_ACTIVEWAIT:
case Jobs.STATUS_PAUSEDWAIT:
// ok
break;
default:
throw new ManifoldCFException("Unexpected job status: "+Integer.toString(status));
}
database.performCommit();
return;
}
catch (ManifoldCFException e)
{
database.signalRollback();
if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT)
{
if (Logging.perf.isDebugEnabled())
Logging.perf.debug("Aborted resetting seeding job: "+e.getMessage());
sleepAmt = getRandomAmount();
continue;
}
throw e;
}
catch (Error e)
{
database.signalRollback();
throw e;
}
finally
{
database.endTransaction();
sleepFor(sleepAmt);
}
}
}
/** Delete jobs in need of being deleted (which are marked "ready for delete").
* This method is meant to be called periodically to perform delete processing on jobs.
*/
public void deleteJobsReadyForDelete()
throws ManifoldCFException
{
while (true)
{
long sleepAmt = 0L;
// This method must find only jobs that have nothing hanging around in their jobqueue that represents an ingested
// document. Any jobqueue entries which are in a state to interfere with the delete will be cleaned up by other
// threads, so eventually a job will become eligible. This happens when there are no records that have an ingested
// status: complete, purgatory, being-cleaned, being-deleted, or pending purgatory.
database.beginTransaction();
try
{
// The original query was:
//
// SELECT id FROM jobs t0 WHERE status='D' AND NOT EXISTS(SELECT 'x' FROM jobqueue t1 WHERE t0.id=t1.jobid AND
// t1.status IN ('C', 'F', 'G'))
//
// However, this did not work well with Postgres when the tables got big. So I revised things to do the following multi-stage process:
// (1) The query should be broken up, such that n queries are done:
// (a) the first one should get all candidate jobs (those that have the right state)
// (b) there should be a query for each job of roughly this form: SELECT id FROM jobqueue WHERE jobid=xxx AND status IN (...) LIMIT 1
// This will work way better than postgresql currently works, because neither the cost-based analysis nor the actual NOT clause seem to allow
// early exit!!
// Do the first query, getting the candidate jobs to be considered
StringBuilder sb = new StringBuilder("SELECT ");
ArrayList list = new ArrayList();
sb.append(jobs.idField).append(" FROM ").append(jobs.getTableName()).append(" WHERE ")
.append(database.buildConjunctionClause(list,new ClauseDescription[]{
new UnitaryClause(jobs.statusField,jobs.statusToString(jobs.STATUS_DELETING))}))
.append(" FOR UPDATE");
IResultSet set = database.performQuery(sb.toString(),list,null,null);
// Now, loop through this list. For each one, verify that it's okay to delete it
int i = 0;
while (i < set.getRowCount())
{
IResultRow row = set.getRow(i++);
Long jobID = (Long)row.getValue(jobs.idField);
list.clear();
sb = new StringBuilder("SELECT ");
sb.append(jobQueue.idField).append(" FROM ").append(jobQueue.getTableName()).append(" WHERE ")
.append(database.buildConjunctionClause(list,new ClauseDescription[]{
new UnitaryClause(jobQueue.jobIDField,jobID),
new MultiClause(jobQueue.statusField,new Object[]{
jobQueue.statusToString(jobQueue.STATUS_ELIGIBLEFORDELETE),
jobQueue.statusToString(jobQueue.STATUS_BEINGDELETED)})}))
.append(" ").append(database.constructOffsetLimitClause(0,1));
IResultSet confirmSet = database.performQuery(sb.toString(),list,null,null,1,null);
if (confirmSet.getRowCount() > 0)
continue;
ManifoldCF.noteConfigurationChange();
// Remove documents from job queue
jobQueue.deleteAllJobRecords(jobID);
// Remove carrydowns for the job
carryDown.deleteOwner(jobID);
// Nothing is in a critical section - so this should be OK.
hopCount.deleteOwner(jobID);
jobs.delete(jobID);
if (Logging.jobs.isDebugEnabled())
{
Logging.jobs.debug("Removed job "+jobID);
}
}
database.performCommit();
return;
}
catch (ManifoldCFException e)
{
database.signalRollback();
if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT)
{
if (Logging.perf.isDebugEnabled())
Logging.perf.debug("Aborted deleting jobs ready for delete: "+e.getMessage());
sleepAmt = getRandomAmount();
continue;
}
throw e;
}
catch (Error e)
{
database.signalRollback();
throw e;
}
finally
{
database.endTransaction();
sleepFor(sleepAmt);
}
}
}
/** Put all eligible jobs in the "shutting down" state.
*/
public void finishJobs()
throws ManifoldCFException
{
while (true)
{
long sleepAmt = 0L;
      // The jobs we should transition:
      // - are active (including the active-wait and uninstalled/no-output variants)
      // - have no ACTIVE, ACTIVENEEDRESCAN, PENDING, ACTIVEPURGATORY, ACTIVENEEDRESCANPURGATORY,
      //   or PENDINGPURGATORY records
database.beginTransaction();
try
{
// The query I used to emit was:
// SELECT jobid FROM jobs t0 WHERE t0.status='A' AND NOT EXISTS(SELECT 'x' FROM jobqueue t1 WHERE
// t0.id=t1.jobid AND t1.status IN ('A','P','F','G'))
// This did not get along well with Postgresql, so instead this is what is now done:
// (1) The query should be broken up, such that n queries are done:
// (a) the first one should get all candidate jobs (those that have the right state)
// (b) there should be a query for each job of roughly this form: SELECT id FROM jobqueue WHERE jobid=xxx AND status IN (...) LIMIT 1
// This will work way better than postgresql currently works, because neither the cost-based analysis nor the actual NOT clause seem to allow
// early exit!!
// Do the first query, getting the candidate jobs to be considered
StringBuilder sb = new StringBuilder("SELECT ");
ArrayList list = new ArrayList();
sb.append(jobs.idField).append(" FROM ").append(jobs.getTableName()).append(" WHERE ")
.append(database.buildConjunctionClause(list,new ClauseDescription[]{
new MultiClause(jobs.statusField,new Object[]{
jobs.statusToString(jobs.STATUS_ACTIVE),
jobs.statusToString(jobs.STATUS_ACTIVEWAIT),
jobs.statusToString(jobs.STATUS_ACTIVE_UNINSTALLED),
jobs.statusToString(jobs.STATUS_ACTIVE_NOOUTPUT),
jobs.statusToString(jobs.STATUS_ACTIVE_NEITHER)})}))
.append(" FOR UPDATE");
IResultSet set = database.performQuery(sb.toString(),list,null,null);
int i = 0;
while (i < set.getRowCount())
{
IResultRow row = set.getRow(i++);
Long jobID = (Long)row.getValue(jobs.idField);
// Check to be sure the job is a candidate for shutdown
sb = new StringBuilder("SELECT ");
list.clear();
sb.append(jobQueue.idField).append(" FROM ").append(jobQueue.getTableName()).append(" WHERE ")
.append(database.buildConjunctionClause(list,new ClauseDescription[]{
new UnitaryClause(jobQueue.jobIDField,jobID),
new MultiClause(jobQueue.statusField,new Object[]{
jobQueue.statusToString(jobQueue.STATUS_ACTIVE),
jobQueue.statusToString(jobQueue.STATUS_ACTIVENEEDRESCAN),
jobQueue.statusToString(jobQueue.STATUS_PENDING),
jobQueue.statusToString(jobQueue.STATUS_ACTIVEPURGATORY),
jobQueue.statusToString(jobQueue.STATUS_ACTIVENEEDRESCANPURGATORY),
jobQueue.statusToString(jobQueue.STATUS_PENDINGPURGATORY)})}))
.append(" ").append(database.constructOffsetLimitClause(0,1));
IResultSet confirmSet = database.performQuery(sb.toString(),list,null,null,1,null);
if (confirmSet.getRowCount() > 0)
continue;
// Mark status of job as "finishing"
jobs.writeStatus(jobID,jobs.STATUS_SHUTTINGDOWN);
if (Logging.jobs.isDebugEnabled())
{
Logging.jobs.debug("Marked job "+jobID+" for shutdown");
}
}
database.performCommit();
return;
}
catch (ManifoldCFException e)
{
database.signalRollback();
if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT)
{
if (Logging.perf.isDebugEnabled())
Logging.perf.debug("Aborted finishing jobs: "+e.getMessage());
sleepAmt = getRandomAmount();
continue;
}
throw e;
}
catch (Error e)
{
database.signalRollback();
throw e;
}
finally
{
database.endTransaction();
sleepFor(sleepAmt);
}
}
}
/** Find the list of jobs that need to have their connectors notified of job completion.
*@return the ID's of jobs that need their output connectors notified in order to become inactive.
*/
public JobNotifyRecord[] getJobsReadyForInactivity()
throws ManifoldCFException
{
while (true)
{
long sleepAmt = 0L;
database.beginTransaction();
try
{
// Do the query
StringBuilder sb = new StringBuilder("SELECT ");
ArrayList list = new ArrayList();
sb.append(jobs.idField).append(" FROM ").append(jobs.getTableName()).append(" WHERE ")
.append(database.buildConjunctionClause(list,new ClauseDescription[]{
new UnitaryClause(jobs.statusField,jobs.statusToString(jobs.STATUS_READYFORNOTIFY))}))
.append(" FOR UPDATE");
IResultSet set = database.performQuery(sb.toString(),list,null,null);
// Return them all
JobNotifyRecord[] rval = new JobNotifyRecord[set.getRowCount()];
int i = 0;
while (i < rval.length)
{
IResultRow row = set.getRow(i);
Long jobID = (Long)row.getValue(jobs.idField);
        // Mark status of job as "notifying of completion"
jobs.writeStatus(jobID,jobs.STATUS_NOTIFYINGOFCOMPLETION);
if (Logging.jobs.isDebugEnabled())
{
Logging.jobs.debug("Found job "+jobID+" in need of notification");
}
rval[i++] = new JobNotifyRecord(jobID);
}
database.performCommit();
return rval;
}
catch (ManifoldCFException e)
{
database.signalRollback();
if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT)
{
if (Logging.perf.isDebugEnabled())
Logging.perf.debug("Aborted getting jobs ready for notify: "+e.getMessage());
sleepAmt = getRandomAmount();
continue;
}
throw e;
}
catch (Error e)
{
database.signalRollback();
throw e;
}
finally
{
database.endTransaction();
sleepFor(sleepAmt);
}
}
}
/** Complete the sequence that resumes jobs, either from a pause or from a scheduling window
* wait. The logic will restore the job to an active state (many possibilities depending on
* connector status), and will record the jobs that have been so modified.
*@param timestamp is the current time in milliseconds since epoch.
*@param modifiedJobs is filled in with the set of IJobDescription objects that were resumed.
*/
public void finishJobResumes(long timestamp, ArrayList modifiedJobs)
throws ManifoldCFException
{
// Do the first query, getting the candidate jobs to be considered
StringBuilder sb = new StringBuilder("SELECT ");
ArrayList list = new ArrayList();
sb.append(jobs.idField)
.append(" FROM ").append(jobs.getTableName()).append(" WHERE ")
.append(database.buildConjunctionClause(list,new ClauseDescription[]{
new MultiClause(jobs.statusField,new Object[]{
jobs.statusToString(jobs.STATUS_RESUMING),
jobs.statusToString(jobs.STATUS_RESUMINGSEEDING)
})}));
IResultSet set = database.performQuery(sb.toString(),list,null,null);
int i = 0;
while (i < set.getRowCount())
{
IResultRow row = set.getRow(i++);
Long jobID = (Long)row.getValue(jobs.idField);
// There are no secondary checks that need to be made; just resume
IJobDescription jobDesc = jobs.load(jobID,true);
modifiedJobs.add(jobDesc);
jobs.finishResumeJob(jobID,timestamp);
if (Logging.jobs.isDebugEnabled())
{
Logging.jobs.debug("Resumed job "+jobID);
}
}
}
/** Complete the sequence that stops jobs, either for abort, pause, or because of a scheduling
* window. The logic will move the job to its next state (INACTIVE, PAUSED, ACTIVEWAIT),
* and will record the jobs that have been so modified.
*@param timestamp is the current time in milliseconds since epoch.
*@param modifiedJobs is filled in with the set of IJobDescription objects that were stopped.
*/
public void finishJobStops(long timestamp, ArrayList modifiedJobs)
throws ManifoldCFException
{
// The query this method formerly emitted was:
//
// SELECT jobid FROM jobs t0 WHERE t0.status='X' AND NOT EXISTS(SELECT 'x' FROM jobqueue t1 WHERE
// t0.id=t1.jobid AND t1.status IN ('A','F'))
//
// It is now broken up so that PostgreSQL behaves more efficiently.
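// Schematically, the per-job confirmation query issued below looks like the following
// (the LIMIT syntax is supplied by constructOffsetLimitClause and is database-specific):
//
// SELECT id FROM jobqueue WHERE jobid=? AND status IN ('A','F',...) LIMIT 1
//
// If this returns any row, the job still has active documents and is skipped.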
// Do the first query, getting the candidate jobs to be considered
StringBuilder sb = new StringBuilder("SELECT ");
ArrayList list = new ArrayList();
sb.append(jobs.idField)
.append(" FROM ").append(jobs.getTableName()).append(" WHERE ")
.append(database.buildConjunctionClause(list,new ClauseDescription[]{
new MultiClause(jobs.statusField,new Object[]{
jobs.statusToString(jobs.STATUS_ABORTING),
jobs.statusToString(jobs.STATUS_ABORTINGFORRESTART),
jobs.statusToString(jobs.STATUS_ABORTINGFORRESTARTMINIMAL),
jobs.statusToString(jobs.STATUS_PAUSING),
jobs.statusToString(jobs.STATUS_PAUSINGSEEDING),
jobs.statusToString(jobs.STATUS_ACTIVEWAITING),
jobs.statusToString(jobs.STATUS_ACTIVEWAITINGSEEDING),
jobs.statusToString(jobs.STATUS_PAUSINGWAITING),
jobs.statusToString(jobs.STATUS_PAUSINGWAITINGSEEDING)
})}));
IResultSet set = database.performQuery(sb.toString(),list,null,null);
int i = 0;
while (i < set.getRowCount())
{
IResultRow row = set.getRow(i++);
Long jobID = (Long)row.getValue(jobs.idField);
sb = new StringBuilder("SELECT ");
list.clear();
sb.append(jobQueue.idField).append(" FROM ").append(jobQueue.getTableName()).append(" WHERE ")
.append(database.buildConjunctionClause(list,new ClauseDescription[]{
new UnitaryClause(jobQueue.jobIDField,jobID),
new MultiClause(jobQueue.statusField,new Object[]{
jobQueue.statusToString(jobQueue.STATUS_ACTIVE),
jobQueue.statusToString(jobQueue.STATUS_ACTIVEPURGATORY),
jobQueue.statusToString(jobQueue.STATUS_ACTIVENEEDRESCAN),
jobQueue.statusToString(jobQueue.STATUS_ACTIVENEEDRESCANPURGATORY)})}))
.append(" ").append(database.constructOffsetLimitClause(0,1));
IResultSet confirmSet = database.performQuery(sb.toString(),list,null,null,1,null);
if (confirmSet.getRowCount() > 0)
continue;
// All the job's documents need to have their docpriority set to null, to clear dead wood out of the docpriority index.
// See CONNECTORS-290.
// We do this BEFORE updating the job state.
jobQueue.clearDocPriorities(jobID);
IJobDescription jobDesc = jobs.load(jobID,true);
modifiedJobs.add(jobDesc);
jobs.finishStopJob(jobID,timestamp);
if (Logging.jobs.isDebugEnabled())
{
Logging.jobs.debug("Stopped job "+jobID);
}
}
}
/** Reset eligible jobs either back to the "inactive" state, or make them active again. The
* latter will occur if the cleanup phase of the job generated more pending documents.
*
* This method is used to pick up all jobs in the shutting down state
* whose purgatory or being-cleaned records have all been processed.
*
*@param currentTime is the current time in milliseconds since epoch.
*@param resetJobs is filled in with the set of IJobDescription objects that were reset.
*/
public void resetJobs(long currentTime, ArrayList resetJobs)
throws ManifoldCFException
{
// Query for all jobs that fulfill the criteria
// The query used to look like:
//
// SELECT id FROM jobs t0 WHERE status='D' AND NOT EXISTS(SELECT 'x' FROM jobqueue t1 WHERE
// t0.id=t1.jobid AND t1.status='P')
//
// Now, the query is broken up, for performance
// Do the first query, getting the candidate jobs to be considered
StringBuilder sb = new StringBuilder("SELECT ");
ArrayList list = new ArrayList();
sb.append(jobs.idField).append(" FROM ").append(jobs.getTableName()).append(" WHERE ")
.append(database.buildConjunctionClause(list,new ClauseDescription[]{
new UnitaryClause(jobs.statusField,jobs.statusToString(jobs.STATUS_SHUTTINGDOWN))}));
IResultSet set = database.performQuery(sb.toString(),list,null,null);
int i = 0;
while (i < set.getRowCount())
{
IResultRow row = set.getRow(i++);
Long jobID = (Long)row.getValue(jobs.idField);
// Check to be sure the job has no remaining purgatory or being-cleaned records
sb = new StringBuilder("SELECT ");
list.clear();
sb.append(jobQueue.idField).append(" FROM ").append(jobQueue.getTableName()).append(" WHERE ")
.append(database.buildConjunctionClause(list,new ClauseDescription[]{
new UnitaryClause(jobQueue.jobIDField,jobID),
new MultiClause(jobQueue.statusField,new Object[]{
jobQueue.statusToString(jobQueue.STATUS_PURGATORY),
jobQueue.statusToString(jobQueue.STATUS_BEINGCLEANED)})}))
.append(" ").append(database.constructOffsetLimitClause(0,1));
IResultSet confirmSet = database.performQuery(sb.toString(),list,null,null,1,null);
if (confirmSet.getRowCount() > 0)
continue;
// The shutting-down phase is complete. However, we need to check if there are any outstanding
// PENDING or PENDINGPURGATORY records before we can decide what to do.
sb = new StringBuilder("SELECT ");
list.clear();
sb.append(jobQueue.idField).append(" FROM ").append(jobQueue.getTableName()).append(" WHERE ")
.append(database.buildConjunctionClause(list,new ClauseDescription[]{
new UnitaryClause(jobQueue.jobIDField,jobID),
new MultiClause(jobQueue.statusField,new Object[]{
jobQueue.statusToString(jobQueue.STATUS_PENDING),
jobQueue.statusToString(jobQueue.STATUS_PENDINGPURGATORY)})}))
.append(" ").append(database.constructOffsetLimitClause(0,1));
confirmSet = database.performQuery(sb.toString(),list,null,null,1,null);
if (confirmSet.getRowCount() > 0)
{
// This job needs to re-enter the active state. Make that happen.
jobs.returnJobToActive(jobID);
if (Logging.jobs.isDebugEnabled())
{
Logging.jobs.debug("Job "+jobID+" is re-entering active state");
}
}
else
{
// This job should be marked as finished.
IJobDescription jobDesc = jobs.load(jobID,true);
resetJobs.add(jobDesc);
jobs.finishJob(jobID,currentTime);
if (Logging.jobs.isDebugEnabled())
{
Logging.jobs.debug("Job "+jobID+" now completed");
}
}
}
}
// Status reports
/** Get the status of a job.
*@return the status object for the specified job.
*/
@Override
public JobStatus getStatus(Long jobID)
throws ManifoldCFException
{
return getStatus(jobID,true);
}
/** Get a list of all jobs, and their status information.
*@return an ordered array of job status objects.
*/
@Override
public JobStatus[] getAllStatus()
throws ManifoldCFException
{
return getAllStatus(true);
}
/** Get a list of running jobs. This is for status reporting.
*@return an array of the job status objects.
*/
@Override
public JobStatus[] getRunningJobs()
throws ManifoldCFException
{
return getRunningJobs(true);
}
/** Get a list of completed jobs, and their statistics.
*@return an array of the job status objects.
*/
@Override
public JobStatus[] getFinishedJobs()
throws ManifoldCFException
{
return getFinishedJobs(true);
}
/** Get the status of a job.
*@param jobID is the job ID.
*@param includeCounts is true if document counts should be included.
*@return the status object for the specified job.
*/
public JobStatus getStatus(Long jobID, boolean includeCounts)
throws ManifoldCFException
{
return getStatus(jobID, includeCounts, Integer.MAX_VALUE);
}
/** Get a list of all jobs, and their status information.
*@param includeCounts is true if document counts should be included.
*@return an ordered array of job status objects.
*/
public JobStatus[] getAllStatus(boolean includeCounts)
throws ManifoldCFException
{
return getAllStatus(includeCounts, Integer.MAX_VALUE);
}
/** Get a list of running jobs. This is for status reporting.
*@param includeCounts is true if document counts should be included.
*@return an array of the job status objects.
*/
public JobStatus[] getRunningJobs(boolean includeCounts)
throws ManifoldCFException
{
return getRunningJobs(includeCounts, Integer.MAX_VALUE);
}
/** Get a list of completed jobs, and their statistics.
*@param includeCounts is true if document counts should be included.
*@return an array of the job status objects.
*/
public JobStatus[] getFinishedJobs(boolean includeCounts)
throws ManifoldCFException
{
return getFinishedJobs(includeCounts, Integer.MAX_VALUE);
}
/** Get the status of a job.
*@param jobID is the job ID.
*@param includeCounts is true if document counts should be included.
*@param maxCount is the maximum number of documents we want to count for each status.
*@return the status object for the specified job.
*/
@Override
public JobStatus getStatus(Long jobID, boolean includeCounts, int maxCount)
throws ManifoldCFException
{
ArrayList list = new ArrayList();
String whereClause = Jobs.idField+"=?";
list.add(jobID);
JobStatus[] records = makeJobStatus(whereClause,list,includeCounts,maxCount);
if (records.length == 0)
return null;
return records[0];
}
/** Get a list of all jobs, and their status information.
*@param includeCounts is true if document counts should be included.
*@param maxCount is the maximum number of documents we want to count for each status.
*@return an ordered array of job status objects.
*/
public JobStatus[] getAllStatus(boolean includeCounts, int maxCount)
throws ManifoldCFException
{
return makeJobStatus(null,null,includeCounts,maxCount);
}
/** Get a list of running jobs. This is for status reporting.
*@param includeCounts is true if document counts should be included.
*@param maxCount is the maximum number of documents we want to count for each status.
*@return an array of the job status objects.
*/
@Override
public JobStatus[] getRunningJobs(boolean includeCounts, int maxCount)
throws ManifoldCFException
{
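// "Running" is interpreted broadly here: every active, paused, waiting, pausing, and
// resuming variant is included.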
ArrayList whereParams = new ArrayList();
String whereClause = database.buildConjunctionClause(whereParams,new ClauseDescription[]{
new MultiClause(Jobs.statusField,new Object[]{
Jobs.statusToString(Jobs.STATUS_ACTIVE),
Jobs.statusToString(Jobs.STATUS_ACTIVESEEDING),
Jobs.statusToString(Jobs.STATUS_ACTIVE_UNINSTALLED),
Jobs.statusToString(Jobs.STATUS_ACTIVESEEDING_UNINSTALLED),
Jobs.statusToString(Jobs.STATUS_ACTIVE_NOOUTPUT),
Jobs.statusToString(Jobs.STATUS_ACTIVESEEDING_NOOUTPUT),
Jobs.statusToString(Jobs.STATUS_ACTIVE_NEITHER),
Jobs.statusToString(Jobs.STATUS_ACTIVESEEDING_NEITHER),
Jobs.statusToString(Jobs.STATUS_PAUSED),
Jobs.statusToString(Jobs.STATUS_PAUSEDSEEDING),
Jobs.statusToString(Jobs.STATUS_ACTIVEWAIT),
Jobs.statusToString(Jobs.STATUS_ACTIVEWAITSEEDING),
Jobs.statusToString(Jobs.STATUS_PAUSEDWAIT),
Jobs.statusToString(Jobs.STATUS_PAUSEDWAITSEEDING),
Jobs.statusToString(Jobs.STATUS_PAUSING),
Jobs.statusToString(Jobs.STATUS_PAUSINGSEEDING),
Jobs.statusToString(Jobs.STATUS_ACTIVEWAITING),
Jobs.statusToString(Jobs.STATUS_ACTIVEWAITINGSEEDING),
Jobs.statusToString(Jobs.STATUS_PAUSINGWAITING),
Jobs.statusToString(Jobs.STATUS_PAUSINGWAITINGSEEDING),
Jobs.statusToString(Jobs.STATUS_RESUMING),
Jobs.statusToString(Jobs.STATUS_RESUMINGSEEDING)
})});
return makeJobStatus(whereClause,whereParams,includeCounts,maxCount);
}
/** Get a list of completed jobs, and their statistics.
*@param includeCounts is true if document counts should be included.
*@param maxCount is the maximum number of documents we want to count for each status.
*@return an array of the job status objects.
*/
@Override
public JobStatus[] getFinishedJobs(boolean includeCounts, int maxCount)
throws ManifoldCFException
{
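// Finished jobs are those in the INACTIVE state that also have a recorded end time.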
StringBuilder sb = new StringBuilder();
ArrayList whereParams = new ArrayList();
sb.append(database.buildConjunctionClause(whereParams,new ClauseDescription[]{
new UnitaryClause(Jobs.statusField,Jobs.statusToString(Jobs.STATUS_INACTIVE))})).append(" AND ")
.append(Jobs.endTimeField).append(" IS NOT NULL");
return makeJobStatus(sb.toString(),whereParams,includeCounts,maxCount);
}
// Protected methods and classes
/** Make a job status array from a query result.
*@param whereClause is the where clause for the jobs we are interested in.
*@param whereParams are the parameters belonging to the where clause.
*@param includeCounts is true if document counts should be included.
*@param maxCount is the maximum number of documents we want to count for each status.
*@return the status array.
*/
protected JobStatus[] makeJobStatus(String whereClause, ArrayList whereParams, boolean includeCounts, int maxCount)
throws ManifoldCFException
{
IResultSet set = database.performQuery("SELECT t0."+
Jobs.idField+",t0."+
Jobs.descriptionField+",t0."+
Jobs.statusField+",t0."+
Jobs.startTimeField+",t0."+
Jobs.endTimeField+",t0."+
Jobs.errorField+
" FROM "+jobs.getTableName()+" t0 "+((whereClause==null)?"":(" WHERE "+whereClause))+" ORDER BY "+Jobs.descriptionField+" ASC",
whereParams,null,null);
// Build hashes for the three per-job document counts: set2 (total), set3 (outstanding), and set4 (processed)
Map<Long,Long> set2Hash = new HashMap<Long,Long>();
Map<Long,Long> set3Hash = new HashMap<Long,Long>();
Map<Long,Long> set4Hash = new HashMap<Long,Long>();
Map<Long,Boolean> set2Exact = new HashMap<Long,Boolean>();
Map<Long,Boolean> set3Exact = new HashMap<Long,Boolean>();
Map<Long,Boolean> set4Exact = new HashMap<Long,Boolean>();
if (includeCounts)
{
// If we are counting all of them anyway, do this via GROUP BY since it will be the fastest. But
// otherwise, fire off an individual query at a time.
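// Schematically, the GROUP BY variant built in buildCountsUsingGroupBy() is
// (column names illustrative):
//
// SELECT jobid,COUNT(dochash) AS doccount FROM jobqueue t1 [WHERE EXISTS(...)] GROUP BY jobid
//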
if (maxCount == Integer.MAX_VALUE)
{
buildCountsUsingGroupBy(whereClause,whereParams,set2Hash,set3Hash,set4Hash,set2Exact,set3Exact,set4Exact);
}
else
{
// Check whether the total number of matching jobqueue rows exceeds the limit. If not, we can still use the cheaper query.
StringBuilder sb = new StringBuilder("SELECT ");
ArrayList list = new ArrayList();
sb.append(database.constructCountClause(JobQueue.docHashField)).append(" AS doccount")
.append(" FROM ").append(jobQueue.getTableName()).append(" t1");
addWhereClause(sb,list,whereClause,whereParams,false);
sb.append(" ").append(database.constructOffsetLimitClause(0,maxCount+1,false));
IResultSet countResult = database.performQuery(sb.toString(),list,null,null);
if (countResult.getRowCount() > 0 && ((Long)countResult.getRow(0).getValue("doccount")).longValue() > maxCount)
{
// Too many items in queue; do it the hard way
buildCountsUsingIndividualQueries(whereClause,whereParams,maxCount,set2Hash,set3Hash,set4Hash,set2Exact,set3Exact,set4Exact);
}
else
{
// Cheap way should still work.
buildCountsUsingGroupBy(whereClause,whereParams,set2Hash,set3Hash,set4Hash,set2Exact,set3Exact,set4Exact);
}
}
}
JobStatus[] rval = new JobStatus[set.getRowCount()];
for (int i = 0; i < rval.length; i++)
{
IResultRow row = set.getRow(i);
Long jobID = (Long)row.getValue(Jobs.idField);
String description = row.getValue(Jobs.descriptionField).toString();
int status = Jobs.stringToStatus(row.getValue(Jobs.statusField).toString());
Long startTimeValue = (Long)row.getValue(Jobs.startTimeField);
long startTime = -1;
if (startTimeValue != null)
startTime = startTimeValue.longValue();
Long endTimeValue = (Long)row.getValue(Jobs.endTimeField);
long endTime = -1;
if (endTimeValue != null)
endTime = endTimeValue.longValue();
String errorText = (String)row.getValue(Jobs.errorField);
if (errorText != null && errorText.length() == 0)
errorText = null;
int rstatus = JobStatus.JOBSTATUS_NOTYETRUN;
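// Map the fine-grained internal job status onto the coarse status reported in the UI.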
switch (status)
{
case Jobs.STATUS_INACTIVE:
if (errorText != null)
rstatus = JobStatus.JOBSTATUS_ERROR;
else
{
if (startTime >= 0)
rstatus = JobStatus.JOBSTATUS_COMPLETED;
else
rstatus = JobStatus.JOBSTATUS_NOTYETRUN;
}
break;
case Jobs.STATUS_ACTIVE_UNINSTALLED:
case Jobs.STATUS_ACTIVESEEDING_UNINSTALLED:
case Jobs.STATUS_ACTIVE_NOOUTPUT:
case Jobs.STATUS_ACTIVESEEDING_NOOUTPUT:
case Jobs.STATUS_ACTIVE_NEITHER:
case Jobs.STATUS_ACTIVESEEDING_NEITHER:
rstatus = JobStatus.JOBSTATUS_RUNNING_UNINSTALLED;
break;
case Jobs.STATUS_ACTIVE:
case Jobs.STATUS_ACTIVESEEDING:
rstatus = JobStatus.JOBSTATUS_RUNNING;
break;
case Jobs.STATUS_SHUTTINGDOWN:
rstatus = JobStatus.JOBSTATUS_JOBENDCLEANUP;
break;
case Jobs.STATUS_READYFORNOTIFY:
case Jobs.STATUS_NOTIFYINGOFCOMPLETION:
rstatus = JobStatus.JOBSTATUS_JOBENDNOTIFICATION;
break;
case Jobs.STATUS_ABORTING:
case Jobs.STATUS_ABORTINGSEEDING:
case Jobs.STATUS_ABORTINGSTARTINGUP:
case Jobs.STATUS_ABORTINGSTARTINGUPMINIMAL:
rstatus = JobStatus.JOBSTATUS_ABORTING;
break;
case Jobs.STATUS_ABORTINGFORRESTART:
case Jobs.STATUS_ABORTINGFORRESTARTMINIMAL:
case Jobs.STATUS_ABORTINGFORRESTARTSEEDING:
case Jobs.STATUS_ABORTINGFORRESTARTSEEDINGMINIMAL:
case Jobs.STATUS_ABORTINGSTARTINGUPFORRESTART:
case Jobs.STATUS_ABORTINGSTARTINGUPFORRESTARTMINIMAL:
rstatus = JobStatus.JOBSTATUS_RESTARTING;
break;
case Jobs.STATUS_PAUSING:
case Jobs.STATUS_PAUSINGSEEDING:
case Jobs.STATUS_ACTIVEWAITING:
case Jobs.STATUS_ACTIVEWAITINGSEEDING:
case Jobs.STATUS_PAUSINGWAITING:
case Jobs.STATUS_PAUSINGWAITINGSEEDING:
rstatus = JobStatus.JOBSTATUS_STOPPING;
break;
case Jobs.STATUS_RESUMING:
case Jobs.STATUS_RESUMINGSEEDING:
rstatus = JobStatus.JOBSTATUS_RESUMING;
break;
case Jobs.STATUS_PAUSED:
case Jobs.STATUS_PAUSEDSEEDING:
rstatus = JobStatus.JOBSTATUS_PAUSED;
break;
case Jobs.STATUS_ACTIVEWAIT:
case Jobs.STATUS_ACTIVEWAITSEEDING:
rstatus = JobStatus.JOBSTATUS_WINDOWWAIT;
break;
case Jobs.STATUS_PAUSEDWAIT:
case Jobs.STATUS_PAUSEDWAITSEEDING:
rstatus = JobStatus.JOBSTATUS_PAUSED;
break;
case Jobs.STATUS_STARTINGUP:
case Jobs.STATUS_STARTINGUPMINIMAL:
case Jobs.STATUS_READYFORSTARTUP:
case Jobs.STATUS_READYFORSTARTUPMINIMAL:
rstatus = JobStatus.JOBSTATUS_STARTING;
break;
case Jobs.STATUS_DELETESTARTINGUP:
case Jobs.STATUS_READYFORDELETE:
case Jobs.STATUS_DELETING:
case Jobs.STATUS_DELETING_NOOUTPUT:
rstatus = JobStatus.JOBSTATUS_DESTRUCTING;
break;
default:
break;
}
Long set2Value = set2Hash.get(jobID);
Long set3Value = set3Hash.get(jobID);
Long set4Value = set4Hash.get(jobID);
Boolean set2ExactValue = set2Exact.get(jobID);
Boolean set3ExactValue = set3Exact.get(jobID);
Boolean set4ExactValue = set4Exact.get(jobID);
rval[i] = new JobStatus(jobID.toString(),description,rstatus,((set2Value==null)?0L:set2Value.longValue()),
((set3Value==null)?0L:set3Value.longValue()),
((set4Value==null)?0L:set4Value.longValue()),
((set2ExactValue==null)?true:set2ExactValue.booleanValue()),
((set3ExactValue==null)?true:set3ExactValue.booleanValue()),
((set4ExactValue==null)?true:set4ExactValue.booleanValue()),
startTime,endTime,errorText);
}
return rval;
}
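/** Build a clause matching all jobqueue statuses that describe outstanding documents,
* i.e. documents that are still awaiting processing or are currently being processed.
*/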
protected static ClauseDescription buildOutstandingClause()
throws ManifoldCFException
{
return new MultiClause(JobQueue.statusField,new Object[]{
JobQueue.statusToString(JobQueue.STATUS_ACTIVE),
JobQueue.statusToString(JobQueue.STATUS_ACTIVENEEDRESCAN),
JobQueue.statusToString(JobQueue.STATUS_PENDING),
JobQueue.statusToString(JobQueue.STATUS_ACTIVEPURGATORY),
JobQueue.statusToString(JobQueue.STATUS_ACTIVENEEDRESCANPURGATORY),
JobQueue.statusToString(JobQueue.STATUS_PENDINGPURGATORY)});
}
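/** Build a clause matching all jobqueue statuses that describe documents which have
* been processed at least once, whether in this crawl or a previous one.
*/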
protected static ClauseDescription buildProcessedClause()
throws ManifoldCFException
{
return new MultiClause(JobQueue.statusField,new Object[]{
JobQueue.statusToString(JobQueue.STATUS_COMPLETE),
JobQueue.statusToString(JobQueue.STATUS_UNCHANGED),
JobQueue.statusToString(JobQueue.STATUS_PURGATORY),
JobQueue.statusToString(JobQueue.STATUS_ACTIVEPURGATORY),
JobQueue.statusToString(JobQueue.STATUS_ACTIVENEEDRESCANPURGATORY),
JobQueue.statusToString(JobQueue.STATUS_PENDINGPURGATORY)});
}
protected void buildCountsUsingIndividualQueries(String whereClause, ArrayList whereParams, int maxCount,
Map<Long,Long> set2Hash, Map<Long,Long> set3Hash, Map<Long,Long> set4Hash,
Map<Long,Boolean> set2Exact, Map<Long,Boolean> set3Exact, Map<Long,Boolean> set4Exact)
throws ManifoldCFException
{
// Fire off an individual query with a limit for each job
// First, get the list of jobs that we are interested in.
StringBuilder sb = new StringBuilder("SELECT ");
ArrayList list = new ArrayList();
sb.append(Jobs.idField).append(" FROM ").append(jobs.getTableName()).append(" t0");
if (whereClause != null)
{
sb.append(" WHERE ")
.append(whereClause);
if (whereParams != null)
list.addAll(whereParams);
}
IResultSet jobSet = database.performQuery(sb.toString(),list,null,null);
// Scan the set of jobs
for (int i = 0; i < jobSet.getRowCount(); i++)
{
IResultRow row = jobSet.getRow(i);
Long jobID = (Long)row.getValue(Jobs.idField);
// Now, for each job, fire off a separate, limited, query for each count we care about
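// The three counts per job are: set2 = total documents, set3 = outstanding documents,
// and set4 = processed documents. Each count query is capped at maxCount+1 rows so
// that a result greater than maxCount signals an inexact ("at least maxCount") count.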
sb = new StringBuilder("SELECT ");
list.clear();
sb.append(database.constructCountClause(JobQueue.docHashField)).append(" AS doccount")
.append(" FROM ").append(jobQueue.getTableName()).append(" WHERE ");
sb.append(database.buildConjunctionClause(list,new ClauseDescription[]{new UnitaryClause(JobQueue.jobIDField,jobID)}));
sb.append(" ").append(database.constructOffsetLimitClause(0,maxCount+1,false));
IResultSet totalSet = database.performQuery(sb.toString(),list,null,null);
if (totalSet.getRowCount() > 0)
{
long rowCount = ((Long)totalSet.getRow(0).getValue("doccount")).longValue();
if (rowCount > maxCount)
{
set2Hash.put(jobID,new Long(maxCount));
set2Exact.put(jobID,new Boolean(false));
}
else
{
set2Hash.put(jobID,new Long(rowCount));
set2Exact.put(jobID,new Boolean(true));
}
}
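// Next, count only the outstanding (pending/active) documents for this job.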
sb = new StringBuilder("SELECT ");
list.clear();
sb.append(database.constructCountClause(JobQueue.docHashField)).append(" AS doccount")
.append(" FROM ").append(jobQueue.getTableName()).append(" WHERE ");
sb.append(database.buildConjunctionClause(list,new ClauseDescription[]{new UnitaryClause(JobQueue.jobIDField,jobID)}));
sb.append(" AND ");
sb.append(database.buildConjunctionClause(list,new ClauseDescription[]{buildOutstandingClause()}));
sb.append(" ").append(database.constructOffsetLimitClause(0,maxCount+1,false));
IResultSet outstandingSet = database.performQuery(sb.toString(),list,null,null);
if (outstandingSet.getRowCount() > 0)
{
long rowCount = ((Long)outstandingSet.getRow(0).getValue("doccount")).longValue();
if (rowCount > maxCount)
{
set3Hash.put(jobID,new Long(maxCount));
set3Exact.put(jobID,new Boolean(false));
}
else
{
set3Hash.put(jobID,new Long(rowCount));
set3Exact.put(jobID,new Boolean(true));
}
}
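// Finally, count only the already-processed documents for this job.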
sb = new StringBuilder("SELECT ");
list.clear();
sb.append(database.constructCountClause(JobQueue.docHashField)).append(" AS doccount")
.append(" FROM ").append(jobQueue.getTableName()).append(" WHERE ");
sb.append(database.buildConjunctionClause(list,new ClauseDescription[]{new UnitaryClause(JobQueue.jobIDField,jobID)}));
sb.append(" AND ");
sb.append(database.buildConjunctionClause(list,new ClauseDescription[]{buildProcessedClause()}));
sb.append(" ").append(database.constructOffsetLimitClause(0,maxCount+1,false));
IResultSet processedSet = database.performQuery(sb.toString(),list,null,null);
if (processedSet.getRowCount() > 0)
{
long rowCount = ((Long)processedSet.getRow(0).getValue("doccount")).longValue();
if (rowCount > maxCount)
{
set4Hash.put(jobID,new Long(maxCount));
set4Exact.put(jobID,new Boolean(false));
}
else
{
set4Hash.put(jobID,new Long(rowCount));
set4Exact.put(jobID,new Boolean(true));
}
}
}
}
protected void buildCountsUsingGroupBy(String whereClause, ArrayList whereParams,
Map<Long,Long> set2Hash, Map<Long,Long> set3Hash, Map<Long,Long> set4Hash,
Map<Long,Boolean> set2Exact, Map<Long,Boolean> set3Exact, Map<Long,Boolean> set4Exact)
throws ManifoldCFException
{
StringBuilder sb = new StringBuilder("SELECT ");
ArrayList list = new ArrayList();
sb.append(JobQueue.jobIDField).append(",")
.append(database.constructCountClause(JobQueue.docHashField)).append(" AS doccount")
.append(" FROM ").append(jobQueue.getTableName()).append(" t1");
addWhereClause(sb,list,whereClause,whereParams,false);
sb.append(" GROUP BY ").append(JobQueue.jobIDField);
IResultSet set2 = database.performQuery(sb.toString(),list,null,null);
sb = new StringBuilder("SELECT ");
list.clear();
sb.append(JobQueue.jobIDField).append(",")
.append(database.constructCountClause(JobQueue.docHashField)).append(" AS doccount")
.append(" FROM ").append(jobQueue.getTableName()).append(" t1 WHERE ")
.append(database.buildConjunctionClause(list,new ClauseDescription[]{buildOutstandingClause()}));
addWhereClause(sb,list,whereClause,whereParams,true);
sb.append(" GROUP BY ").append(JobQueue.jobIDField);
IResultSet set3 = database.performQuery(sb.toString(),list,null,null);
sb = new StringBuilder("SELECT ");
list.clear();
sb.append(JobQueue.jobIDField).append(",")
.append(database.constructCountClause(JobQueue.docHashField)).append(" AS doccount")
.append(" FROM ").append(jobQueue.getTableName()).append(" t1 WHERE ")
.append(database.buildConjunctionClause(list,new ClauseDescription[]{buildProcessedClause()}));
addWhereClause(sb,list,whereClause,whereParams,true);
sb.append(" GROUP BY ").append(JobQueue.jobIDField);
IResultSet set4 = database.performQuery(sb.toString(),list,null,null);
for (int j = 0; j < set2.getRowCount(); j++)
{
IResultRow row = set2.getRow(j);
Long jobID = (Long)row.getValue(JobQueue.jobIDField);
set2Hash.put(jobID,(Long)row.getValue("doccount"));
set2Exact.put(jobID,new Boolean(true));
}
for (int j = 0; j < set3.getRowCount(); j++)
{
IResultRow row = set3.getRow(j);
Long jobID = (Long)row.getValue(JobQueue.jobIDField);
set3Hash.put(jobID,(Long)row.getValue("doccount"));
set3Exact.put(jobID,new Boolean(true));
}
for (int j = 0; j < set4.getRowCount(); j++)
{
IResultRow row = set4.getRow(j);
Long jobID = (Long)row.getValue(JobQueue.jobIDField);
set4Hash.put(jobID,(Long)row.getValue("doccount"));
set4Exact.put(jobID,new Boolean(true));
}
}
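/** Add a restriction to a jobqueue query (table alias t1), using an EXISTS subclause to
* tie each queue row to the jobs (alias t0) selected by the given where clause.
*/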
protected void addWhereClause(StringBuilder sb, ArrayList list, String whereClause, ArrayList whereParams, boolean wherePresent)
{
if (whereClause != null)
{
if (wherePresent)
sb.append(" AND");
else
sb.append(" WHERE");
sb.append(" EXISTS(SELECT 'x' FROM ").append(jobs.getTableName()).append(" t0 WHERE ")
.append(database.buildConjunctionClause(list,new ClauseDescription[]{
new JoinClause("t0."+Jobs.idField,"t1."+JobQueue.jobIDField)})).append(" AND ")
.append(whereClause)
.append(")");
if (whereParams != null)
list.addAll(whereParams);
}
}
// These methods generate reports for direct display in the UI.
/** Run a 'document status' report.
*@param connectionName is the name of the connection.
*@param filterCriteria are the criteria used to limit the records considered for the report.
*@param sortOrder is the specified sort order of the final report.
*@param startRow is the first row to include.
*@param rowCount is the number of rows to include.
*@return the results, with the following columns: identifier, job, state, status, scheduled, action, retrycount, retrylimit. The "scheduled" column and the
* "retrylimit" column are long values representing a time; all other values will be user-friendly strings.
*/
public IResultSet genDocumentStatus(String connectionName, StatusFilterCriteria filterCriteria, SortOrder sortOrder,
int startRow, int rowCount)
throws ManifoldCFException
{
// Build the query.
Long currentTime = new Long(System.currentTimeMillis());
StringBuilder sb = new StringBuilder("SELECT ");
ArrayList list = new ArrayList();
sb.append("t0.").append(jobQueue.idField).append(" AS id,")
.append("t0.").append(jobQueue.docIDField).append(" AS identifier,")
.append("t1.").append(jobs.descriptionField).append(" AS job,")
.append("CASE")
.append(" WHEN ").append("t0.").append(jobQueue.statusField).append("=? THEN 'Not yet processed'")
.append(" WHEN ").append("t0.").append(jobQueue.statusField).append("=? THEN 'Not yet processed'")
.append(" WHEN ").append("t0.").append(jobQueue.statusField).append("=? THEN 'Not yet processed'")
.append(" WHEN ").append("t0.").append(jobQueue.statusField).append("=? THEN 'Processed'")
.append(" WHEN ").append("t0.").append(jobQueue.statusField).append("=? THEN 'Processed'")
.append(" WHEN ").append("t0.").append(jobQueue.statusField).append("=? THEN 'Processed'")
.append(" WHEN ").append("t0.").append(jobQueue.statusField).append("=? THEN 'Processed'")
.append(" WHEN ").append("t0.").append(jobQueue.statusField).append("=? THEN 'Processed'")
.append(" WHEN ").append("t0.").append(jobQueue.statusField).append("=? THEN 'Processed'")
.append(" WHEN ").append("t0.").append(jobQueue.statusField).append("=? THEN 'Being removed'")
.append(" WHEN ").append("t0.").append(jobQueue.statusField).append("=? THEN 'Being removed'")
.append(" WHEN ").append("t0.").append(jobQueue.statusField).append("=? THEN 'Being removed'")
.append(" WHEN ").append("t0.").append(jobQueue.statusField).append("=? THEN 'Out of scope'")
.append(" ELSE 'Unknown'")
.append(" END AS state,")
.append("CASE")
.append(" WHEN ")
.append("t0.").append(jobQueue.statusField).append(" IN (?,?,?)")
.append(" THEN 'Inactive'")
.append(" WHEN ")
.append("t0.").append(jobQueue.statusField).append(" IN (?,?)")
.append(" AND ").append("t0.").append(jobQueue.checkActionField).append("=?")
.append(" AND t0.").append(jobQueue.checkTimeField).append("<=").append(currentTime.toString())
.append(" THEN 'Ready for processing'")
.append(" WHEN ")
.append("t0.").append(jobQueue.statusField).append(" IN (?,?)")
.append(" AND ").append("t0.").append(jobQueue.checkActionField).append("=?")
.append(" AND t0.").append(jobQueue.checkTimeField).append("<=").append(currentTime.toString())
.append(" THEN 'Ready for expiration'")
.append(" WHEN ")
.append("t0.").append(jobQueue.statusField).append(" IN (?,?)")
.append(" AND ").append("t0.").append(jobQueue.checkActionField).append("=?")
.append(" AND t0.").append(jobQueue.checkTimeField).append(">").append(currentTime.toString())
.append(" THEN 'Waiting for processing'")
.append(" WHEN ")
.append("t0.").append(jobQueue.statusField).append(" IN (?,?)")
.append(" AND ").append("t0.").append(jobQueue.checkActionField).append("=?")
.append(" AND t0.").append(jobQueue.checkTimeField).append(">").append(currentTime.toString())
.append(" THEN 'Waiting for expiration'")
.append(" WHEN ")
.append("t0.").append(jobQueue.statusField).append(" IN (?,?)")
.append(" AND ").append("t0.").append(jobQueue.checkTimeField).append(" IS NULL")
.append(" THEN 'Waiting forever'")
.append(" WHEN ")
.append("t0.").append(jobQueue.statusField).append("=?")
.append(" THEN 'Hopcount exceeded'")
.append(" WHEN ").append("t0.").append(jobQueue.statusField).append(" IN (?,?,?)")
.append(" THEN 'Deleting'")
.append(" WHEN ")
.append("t0.").append(jobQueue.statusField).append(" IN (?,?,?,?)")
.append(" AND ").append("t0.").append(jobQueue.checkActionField).append("=?")
.append(" THEN 'Processing'")
.append(" WHEN ")
.append("t0.").append(jobQueue.statusField).append(" IN (?,?,?,?)")
.append(" AND ").append("t0.").append(jobQueue.checkActionField).append("=?")
.append(" THEN 'Expiring'")
.append(" ELSE 'Unknown'")
.append(" END AS status,")
.append("t0.").append(jobQueue.checkTimeField).append(" AS scheduled,")
.append("CASE")
.append(" WHEN ").append("t0.").append(jobQueue.checkActionField).append("=? THEN 'Process'")
.append(" WHEN ").append("t0.").append(jobQueue.checkActionField).append("=? THEN 'Expire'")
.append(" ELSE 'Unknown'")
.append(" END AS action,")
.append("t0.").append(jobQueue.failCountField).append(" AS retrycount,")
.append("t0.").append(jobQueue.failTimeField).append(" AS retrylimit")
.append(" FROM ").append(jobQueue.getTableName()).append(" t0,").append(jobs.getTableName()).append(" t1 WHERE ")
.append(database.buildConjunctionClause(list,new ClauseDescription[]{
new JoinClause("t0."+jobQueue.jobIDField,"t1."+jobs.idField)}));
list.add(jobQueue.statusToString(jobQueue.STATUS_PENDING));
list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVE));
list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVENEEDRESCAN));
list.add(jobQueue.statusToString(jobQueue.STATUS_PENDINGPURGATORY));
list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVEPURGATORY));
list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVENEEDRESCANPURGATORY));
list.add(jobQueue.statusToString(jobQueue.STATUS_COMPLETE));
list.add(jobQueue.statusToString(jobQueue.STATUS_UNCHANGED));
list.add(jobQueue.statusToString(jobQueue.STATUS_PURGATORY));
list.add(jobQueue.statusToString(jobQueue.STATUS_BEINGDELETED));
list.add(jobQueue.statusToString(jobQueue.STATUS_BEINGCLEANED));
list.add(jobQueue.statusToString(jobQueue.STATUS_ELIGIBLEFORDELETE));
list.add(jobQueue.statusToString(jobQueue.STATUS_HOPCOUNTREMOVED));
list.add(jobQueue.statusToString(jobQueue.STATUS_COMPLETE));
list.add(jobQueue.statusToString(jobQueue.STATUS_UNCHANGED));
list.add(jobQueue.statusToString(jobQueue.STATUS_PURGATORY));
list.add(jobQueue.statusToString(jobQueue.STATUS_PENDING));
list.add(jobQueue.statusToString(jobQueue.STATUS_PENDINGPURGATORY));
list.add(jobQueue.actionToString(jobQueue.ACTION_RESCAN));
list.add(jobQueue.statusToString(jobQueue.STATUS_PENDING));
list.add(jobQueue.statusToString(jobQueue.STATUS_PENDINGPURGATORY));
list.add(jobQueue.actionToString(jobQueue.ACTION_REMOVE));
list.add(jobQueue.statusToString(jobQueue.STATUS_PENDING));
list.add(jobQueue.statusToString(jobQueue.STATUS_PENDINGPURGATORY));
list.add(jobQueue.actionToString(jobQueue.ACTION_RESCAN));
list.add(jobQueue.statusToString(jobQueue.STATUS_PENDING));
list.add(jobQueue.statusToString(jobQueue.STATUS_PENDINGPURGATORY));
list.add(jobQueue.actionToString(jobQueue.ACTION_REMOVE));
list.add(jobQueue.statusToString(jobQueue.STATUS_PENDING));
list.add(jobQueue.statusToString(jobQueue.STATUS_PENDINGPURGATORY));
list.add(jobQueue.statusToString(jobQueue.STATUS_HOPCOUNTREMOVED));
list.add(jobQueue.statusToString(jobQueue.STATUS_BEINGDELETED));
list.add(jobQueue.statusToString(jobQueue.STATUS_BEINGCLEANED));
list.add(jobQueue.statusToString(jobQueue.STATUS_ELIGIBLEFORDELETE));
list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVE));
list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVENEEDRESCAN));
list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVEPURGATORY));
list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVENEEDRESCANPURGATORY));
list.add(jobQueue.actionToString(jobQueue.ACTION_RESCAN));
list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVE));
list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVENEEDRESCAN));
list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVEPURGATORY));
list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVENEEDRESCANPURGATORY));
list.add(jobQueue.actionToString(jobQueue.ACTION_REMOVE));
list.add(jobQueue.actionToString(jobQueue.ACTION_RESCAN));
list.add(jobQueue.actionToString(jobQueue.ACTION_REMOVE));
addCriteria(sb,list,"t0.",connectionName,filterCriteria,true);
// The intrinsic ordering is provided by the "id" column, and nothing else.
addOrdering(sb,new String[]{"id"},sortOrder);
addLimits(sb,startRow,rowCount);
return database.performQuery(sb.toString(),list,null,null,rowCount,null);
}
/** Run a 'queue status' report.
*@param connectionName is the name of the connection.
*@param filterCriteria are the criteria used to limit the records considered for the report.
*@param sortOrder is the specified sort order of the final report.
*@param idBucketDescription is the bucket description for generating the identifier class.
*@param startRow is the first row to include.
*@param rowCount is the number of rows to include.
*@return the results, with the following columns: idbucket, inactive, processing, expiring, deleting,
* processready, expireready, processwaiting, expirewaiting
*/
public IResultSet genQueueStatus(String connectionName, StatusFilterCriteria filterCriteria, SortOrder sortOrder,
BucketDescription idBucketDescription, int startRow, int rowCount)
throws ManifoldCFException
{
// The query we build here looks schematically like:
//
// SELECT substring(docid FROM '<id_regexp>') AS idbucket,
// SUM(CASE WHEN status='C' THEN 1 ELSE 0 END) AS inactive, ... FROM jobqueue WHERE <criteria>
// GROUP BY idbucket
Long currentTime = new Long(System.currentTimeMillis());
StringBuilder sb = new StringBuilder("SELECT ");
ArrayList list = new ArrayList();
sb.append("t1.idbucket,SUM(t1.inactive) AS inactive,SUM(t1.processing) AS processing,SUM(t1.expiring) AS expiring,SUM(t1.deleting) AS deleting,")
.append("SUM(t1.processready) AS processready,SUM(t1.expireready) AS expireready,SUM(t1.processwaiting) AS processwaiting,SUM(t1.expirewaiting) AS expirewaiting,")
.append("SUM(t1.waitingforever) AS waitingforever,SUM(t1.hopcountexceeded) AS hopcountexceeded FROM (SELECT ");
addBucketExtract(sb,list,"",jobQueue.docIDField,idBucketDescription);
sb.append(" AS idbucket,")
.append("CASE")
.append(" WHEN ")
.append(jobQueue.statusField).append(" IN (?,?,?)")
.append(" THEN 1 ELSE 0")
.append(" END")
.append(" AS inactive,")
.append("CASE")
.append(" WHEN ")
.append(jobQueue.statusField).append(" IN (?,?,?,?)")
.append(" AND ").append(jobQueue.checkActionField).append("=?")
.append(" THEN 1 ELSE 0")
.append(" END")
.append(" as processing,")
.append("CASE")
.append(" WHEN ")
.append(jobQueue.statusField).append(" IN (?,?,?,?)")
.append(" AND ").append(jobQueue.checkActionField).append("=?")
.append(" THEN 1 ELSE 0")
.append(" END")
.append(" as expiring,")
.append("CASE")
.append(" WHEN ")
.append(jobQueue.statusField).append(" IN (?,?,?)")
.append(" THEN 1 ELSE 0")
.append(" END")
.append(" as deleting,")
.append("CASE")
.append(" WHEN ")
.append(jobQueue.statusField).append(" IN (?,?)")
.append(" AND ").append(jobQueue.checkActionField).append("=?")
.append(" AND ").append(jobQueue.checkTimeField).append("<=").append(currentTime.toString())
.append(" THEN 1 ELSE 0")
.append(" END")
.append(" as processready,")
.append("CASE")
.append(" WHEN ")
.append(jobQueue.statusField).append(" IN (?,?)")
.append(" AND ").append(jobQueue.checkActionField).append("=?")
.append(" AND ").append(jobQueue.checkTimeField).append("<=").append(currentTime.toString())
.append(" THEN 1 ELSE 0")
.append(" END")
.append(" as expireready,")
.append("CASE")
.append(" WHEN ")
.append(jobQueue.statusField).append(" IN (?,?)")
.append(" AND ").append(jobQueue.checkActionField).append("=?")
.append(" AND ").append(jobQueue.checkTimeField).append(">").append(currentTime.toString())
.append(" THEN 1 ELSE 0")
.append(" END")
.append(" as processwaiting,")
.append("CASE")
.append(" WHEN ")
.append(jobQueue.statusField).append(" IN (?,?)")
.append(" AND ").append(jobQueue.checkActionField).append("=?")
.append(" AND ").append(jobQueue.checkTimeField).append(">").append(currentTime.toString())
.append(" THEN 1 ELSE 0")
.append(" END")
.append(" as expirewaiting,")
.append("CASE")
.append(" WHEN ")
.append(jobQueue.statusField).append(" IN (?,?)")
.append(" AND ").append(jobQueue.checkTimeField).append(" IS NULL")
.append(" THEN 1 ELSE 0")
.append(" END")
.append(" as waitingforever,")
.append("CASE")
.append(" WHEN ")
.append(jobQueue.statusField).append("=?")
.append(" THEN 1 ELSE 0")
.append(" END")
.append(" as hopcountexceeded");
sb.append(" FROM ").append(jobQueue.getTableName());
list.add(jobQueue.statusToString(jobQueue.STATUS_COMPLETE));
list.add(jobQueue.statusToString(jobQueue.STATUS_UNCHANGED));
list.add(jobQueue.statusToString(jobQueue.STATUS_PURGATORY));
list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVE));
list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVENEEDRESCAN));
list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVEPURGATORY));
list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVENEEDRESCANPURGATORY));
list.add(jobQueue.actionToString(jobQueue.ACTION_RESCAN));
list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVE));
list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVENEEDRESCAN));
list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVEPURGATORY));
list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVENEEDRESCANPURGATORY));
list.add(jobQueue.actionToString(jobQueue.ACTION_REMOVE));
list.add(jobQueue.statusToString(jobQueue.STATUS_BEINGDELETED));
list.add(jobQueue.statusToString(jobQueue.STATUS_BEINGCLEANED));
list.add(jobQueue.statusToString(jobQueue.STATUS_ELIGIBLEFORDELETE));
list.add(jobQueue.statusToString(jobQueue.STATUS_PENDING));
list.add(jobQueue.statusToString(jobQueue.STATUS_PENDINGPURGATORY));
list.add(jobQueue.actionToString(jobQueue.ACTION_RESCAN));
list.add(jobQueue.statusToString(jobQueue.STATUS_PENDING));
list.add(jobQueue.statusToString(jobQueue.STATUS_PENDINGPURGATORY));
list.add(jobQueue.actionToString(jobQueue.ACTION_REMOVE));
list.add(jobQueue.statusToString(jobQueue.STATUS_PENDING));
list.add(jobQueue.statusToString(jobQueue.STATUS_PENDINGPURGATORY));
list.add(jobQueue.actionToString(jobQueue.ACTION_RESCAN));
list.add(jobQueue.statusToString(jobQueue.STATUS_PENDING));
list.add(jobQueue.statusToString(jobQueue.STATUS_PENDINGPURGATORY));
list.add(jobQueue.actionToString(jobQueue.ACTION_REMOVE));
list.add(jobQueue.statusToString(jobQueue.STATUS_PENDING));
list.add(jobQueue.statusToString(jobQueue.STATUS_PENDINGPURGATORY));
list.add(jobQueue.statusToString(jobQueue.STATUS_HOPCOUNTREMOVED));
addCriteria(sb,list,"",connectionName,filterCriteria,false);
sb.append(") t1 GROUP BY idbucket");
addOrdering(sb,new String[]{"idbucket","inactive","processing","expiring","deleting","processready","expireready","processwaiting","expirewaiting","waitingforever","hopcountexceeded"},sortOrder);
addLimits(sb,startRow,rowCount);
return database.performQuery(sb.toString(),list,null,null,rowCount,null);
}
// Protected methods for report generation
/** Turn a bucket description into a return column.
* This is complicated by the fact that the extraction code is inherently case-sensitive, so if a
* case-insensitive match is desired we convert the whole value to lower case before doing the match.
*/
protected void addBucketExtract(StringBuilder sb, ArrayList list, String columnPrefix, String columnName, BucketDescription bucketDesc)
{
boolean isSensitive = bucketDesc.isSensitive();
list.add(bucketDesc.getRegexp());
sb.append(database.constructSubstringClause(columnPrefix+columnName,"?",!isSensitive));
}
/** Add criteria clauses to query.
*/
protected boolean addCriteria(StringBuilder sb, ArrayList list, String fieldPrefix, String connectionName, StatusFilterCriteria criteria, boolean whereEmitted)
throws ManifoldCFException
{
Long[] matchingJobs = criteria.getJobs();
if (matchingJobs != null)
{
whereEmitted = emitClauseStart(sb,whereEmitted);
if (matchingJobs.length == 0)
{
sb.append("0>1");
}
else
{
sb.append(database.buildConjunctionClause(list,new ClauseDescription[]{
new MultiClause(fieldPrefix+jobQueue.jobIDField,matchingJobs)}));
}
}
RegExpCriteria identifierRegexp = criteria.getIdentifierMatch();
if (identifierRegexp != null)
{
whereEmitted = emitClauseStart(sb,whereEmitted);
list.add(identifierRegexp.getRegexpString());
sb.append(database.constructRegexpClause(fieldPrefix+jobQueue.docIDField,"?",identifierRegexp.isInsensitive()));
}
Long nowTime = new Long(criteria.getNowTime());
int[] states = criteria.getMatchingStates();
int[] statuses = criteria.getMatchingStatuses();
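// If either no states or no statuses are specified, no documents can match; emit a
// clause that is always false.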
if (states.length == 0 || statuses.length == 0)
{
whereEmitted = emitClauseStart(sb,whereEmitted);
sb.append("0>1");
return whereEmitted;
}
// Iterate through the specified states, and emit a series of OR clauses, one for each state. The contents of the clause will be complex.
whereEmitted = emitClauseStart(sb,whereEmitted);
sb.append("(");
int k = 0;
while (k < states.length)
{
int stateValue = states[k];
if (k > 0)
sb.append(" OR ");
switch (stateValue)
{
case DOCSTATE_NEVERPROCESSED:
sb.append(database.buildConjunctionClause(list,new ClauseDescription[]{
new MultiClause(fieldPrefix+jobQueue.statusField,new Object[]{
jobQueue.statusToString(jobQueue.STATUS_PENDING),
jobQueue.statusToString(jobQueue.STATUS_ACTIVE),
jobQueue.statusToString(jobQueue.STATUS_ACTIVENEEDRESCAN)})}));
break;
case DOCSTATE_PREVIOUSLYPROCESSED:
sb.append(database.buildConjunctionClause(list,new ClauseDescription[]{
new MultiClause(fieldPrefix+jobQueue.statusField,new Object[]{
jobQueue.statusToString(jobQueue.STATUS_PENDINGPURGATORY),
jobQueue.statusToString(jobQueue.STATUS_ACTIVEPURGATORY),
jobQueue.statusToString(jobQueue.STATUS_ACTIVENEEDRESCANPURGATORY),
jobQueue.statusToString(jobQueue.STATUS_ELIGIBLEFORDELETE),
jobQueue.statusToString(jobQueue.STATUS_BEINGDELETED),
jobQueue.statusToString(jobQueue.STATUS_BEINGCLEANED),
jobQueue.statusToString(jobQueue.STATUS_COMPLETE),
jobQueue.statusToString(jobQueue.STATUS_UNCHANGED),
jobQueue.statusToString(jobQueue.STATUS_PURGATORY)})}));
break;
case DOCSTATE_OUTOFSCOPE:
sb.append(database.buildConjunctionClause(list,new ClauseDescription[]{
new MultiClause(fieldPrefix+jobQueue.statusField,new Object[]{
jobQueue.statusToString(jobQueue.STATUS_HOPCOUNTREMOVED)})}));
break;
}
k++;
}
sb.append(")");
whereEmitted = emitClauseStart(sb,whereEmitted);
sb.append("(");
k = 0;
while (k < statuses.length)
{
int stateValue = statuses[k];
if (k > 0)
sb.append(" OR ");
switch (stateValue)
{
case DOCSTATUS_INACTIVE:
sb.append(database.buildConjunctionClause(list,new ClauseDescription[]{
new MultiClause(fieldPrefix+jobQueue.statusField,new Object[]{
jobQueue.statusToString(jobQueue.STATUS_COMPLETE),
jobQueue.statusToString(jobQueue.STATUS_UNCHANGED),
jobQueue.statusToString(jobQueue.STATUS_PURGATORY)})}));
break;
case DOCSTATUS_PROCESSING:
sb.append(database.buildConjunctionClause(list,new ClauseDescription[]{
new MultiClause(fieldPrefix+jobQueue.statusField,new Object[]{
jobQueue.statusToString(jobQueue.STATUS_ACTIVE),
jobQueue.statusToString(jobQueue.STATUS_ACTIVENEEDRESCAN),
jobQueue.statusToString(jobQueue.STATUS_ACTIVEPURGATORY),
jobQueue.statusToString(jobQueue.STATUS_ACTIVENEEDRESCANPURGATORY)}),
new UnitaryClause(fieldPrefix+jobQueue.checkActionField,jobQueue.actionToString(jobQueue.ACTION_RESCAN))}));
break;
case DOCSTATUS_EXPIRING:
sb.append(database.buildConjunctionClause(list,new ClauseDescription[]{
new MultiClause(fieldPrefix+jobQueue.statusField,new Object[]{
jobQueue.statusToString(jobQueue.STATUS_ACTIVE),
jobQueue.statusToString(jobQueue.STATUS_ACTIVENEEDRESCAN),
jobQueue.statusToString(jobQueue.STATUS_ACTIVEPURGATORY),
jobQueue.statusToString(jobQueue.STATUS_ACTIVENEEDRESCANPURGATORY)}),
new UnitaryClause(fieldPrefix+jobQueue.checkActionField,jobQueue.actionToString(jobQueue.ACTION_REMOVE))}));
break;
case DOCSTATUS_DELETING:
sb.append(database.buildConjunctionClause(list,new ClauseDescription[]{
new MultiClause(fieldPrefix+jobQueue.statusField,new Object[]{
jobQueue.statusToString(jobQueue.STATUS_BEINGDELETED),
jobQueue.statusToString(jobQueue.STATUS_BEINGCLEANED),
jobQueue.statusToString(jobQueue.STATUS_ELIGIBLEFORDELETE)})}));
break;
case DOCSTATUS_READYFORPROCESSING:
sb.append(database.buildConjunctionClause(list,new ClauseDescription[]{
new MultiClause(fieldPrefix+jobQueue.statusField,new Object[]{
jobQueue.statusToString(jobQueue.STATUS_PENDING),
jobQueue.statusToString(jobQueue.STATUS_PENDINGPURGATORY)}),
new UnitaryClause(fieldPrefix+jobQueue.checkActionField,jobQueue.actionToString(jobQueue.ACTION_RESCAN)),
new UnitaryClause(fieldPrefix+jobQueue.checkTimeField,"<=",nowTime)}));
break;
case DOCSTATUS_READYFOREXPIRATION:
sb.append(database.buildConjunctionClause(list,new ClauseDescription[]{
new MultiClause(fieldPrefix+jobQueue.statusField,new Object[]{
jobQueue.statusToString(jobQueue.STATUS_PENDING),
jobQueue.statusToString(jobQueue.STATUS_PENDINGPURGATORY)}),
new UnitaryClause(fieldPrefix+jobQueue.checkActionField,jobQueue.actionToString(jobQueue.ACTION_REMOVE)),
new UnitaryClause(fieldPrefix+jobQueue.checkTimeField,"<=",nowTime)}));
break;
case DOCSTATUS_WAITINGFORPROCESSING:
sb.append(database.buildConjunctionClause(list,new ClauseDescription[]{
new MultiClause(fieldPrefix+jobQueue.statusField,new Object[]{
jobQueue.statusToString(jobQueue.STATUS_PENDING),
jobQueue.statusToString(jobQueue.STATUS_PENDINGPURGATORY)}),
new UnitaryClause(fieldPrefix+jobQueue.checkActionField,jobQueue.actionToString(jobQueue.ACTION_RESCAN)),
new UnitaryClause(fieldPrefix+jobQueue.checkTimeField,">",nowTime)}));
break;
case DOCSTATUS_WAITINGFOREXPIRATION:
sb.append(database.buildConjunctionClause(list,new ClauseDescription[]{
new MultiClause(fieldPrefix+jobQueue.statusField,new Object[]{
jobQueue.statusToString(jobQueue.STATUS_PENDING),
jobQueue.statusToString(jobQueue.STATUS_PENDINGPURGATORY)}),
new UnitaryClause(fieldPrefix+jobQueue.checkActionField,jobQueue.actionToString(jobQueue.ACTION_REMOVE)),
new UnitaryClause(fieldPrefix+jobQueue.checkTimeField,">",nowTime)}));
break;
case DOCSTATUS_WAITINGFOREVER:
sb.append(database.buildConjunctionClause(list,new ClauseDescription[]{
new MultiClause(fieldPrefix+jobQueue.statusField,new Object[]{
jobQueue.statusToString(jobQueue.STATUS_PENDING),
jobQueue.statusToString(jobQueue.STATUS_PENDINGPURGATORY)})}))
.append(" AND ").append(fieldPrefix).append(jobQueue.checkTimeField).append(" IS NULL");
break;
case DOCSTATUS_HOPCOUNTEXCEEDED:
sb.append(database.buildConjunctionClause(list,new ClauseDescription[]{
new MultiClause(fieldPrefix+jobQueue.statusField,new Object[]{
jobQueue.statusToString(jobQueue.STATUS_HOPCOUNTREMOVED)})}));
break;
}
k++;
}
sb.append(")");
return whereEmitted;
}
/** Emit a WHERE or an AND, depending on whether a WHERE has already been emitted.
*/
protected boolean emitClauseStart(StringBuilder sb, boolean whereEmitted)
{
if (whereEmitted)
sb.append(" AND ");
else
sb.append(" WHERE ");
return true;
}
/** Add ordering.
*/
protected void addOrdering(StringBuilder sb, String[] completeFieldList, SortOrder sort)
{
// Keep track of the fields we've seen
Map hash = new HashMap();
// Emit the "Order by"
sb.append(" ORDER BY ");
// Go through the specified list
int i = 0;
int count = sort.getCount();
while (i < count)
{
if (i > 0)
sb.append(",");
String column = sort.getColumn(i);
sb.append(column);
if (sort.getDirection(i) == sort.SORT_ASCENDING)
sb.append(" ASC");
else
sb.append(" DESC");
hash.put(column,column);
i++;
}
// Now, go through the complete field list, and emit sort criteria for everything
// not actually specified. This is so LIMIT and OFFSET give consistent results.
int j = 0;
while (j < completeFieldList.length)
{
String field = completeFieldList[j];
if (hash.get(field) == null)
{
if (i > 0)
sb.append(",");
sb.append(field);
sb.append(" DESC");
//if (j == 0)
// sb.append(" DESC");
//else
// sb.append(" ASC");
i++;
}
j++;
}
}
/** Add limit and offset.
*/
protected void addLimits(StringBuilder sb, int startRow, int maxRowCount)
{
sb.append(" ").append(database.constructOffsetLimitClause(startRow,maxRowCount));
}
/** Class for tracking existing jobqueue row data */
protected static class JobqueueRecord
{
protected Long recordID;
protected int status;
protected Long checkTimeValue;
public JobqueueRecord(Long recordID, int status, Long checkTimeValue)
{
this.recordID = recordID;
this.status = status;
this.checkTimeValue = checkTimeValue;
}
public Long getRecordID()
{
return recordID;
}
public int getStatus()
{
return status;
}
public Long getCheckTimeValue()
{
return checkTimeValue;
}
}
/** We go through 2x the number of documents we should need if we were perfect at setting document priorities. */
private final static int EXTRA_FACTOR = 2;
/** This class provides the throttling limits for the job queueing query.
*/
protected static class ThrottleLimit implements ILimitChecker
{
// For each connection, there is (a) a number (which is the maximum per bin), and (b)
// a current running count per bin. These are stored as elements in a hash map.
protected HashMap connectionMap = new HashMap();
// The maximum number of jobs that we need to see reach their chunk-size limit
protected int n;
// This is the hash table that maps a job ID to the object that tracks the number
// of documents already accumulated for this resultset. The count of the number
// of queue records we have is tallied by going through each job in this table
// and adding the records outstanding for it.
protected HashMap jobQueueHash = new HashMap();
// This is the map from jobid to connection name
protected HashMap jobConnection = new HashMap();
// This is the set of allowed connection names. We discard all documents that are
// not from that set.
protected HashMap activeConnections = new HashMap();
// This is the number of documents per set per connection.
protected HashMap setSizes = new HashMap();
// These are the individual connection maximums, keyed by connection name.
protected HashMap maxConnectionCounts = new HashMap();
// This is the maximum number of documents per set over all the connections we are looking at. This helps us establish a sanity limit.
protected int maxSetSize = 0;
// This is the number of documents processed so far
protected int documentsProcessed = 0;
// This is where we accumulate blocking documents. This is an arraylist of DocumentDescription objects.
protected ArrayList blockingDocumentArray = new ArrayList();
// Cutoff time for documents eligible for prioritization
protected long prioritizationTime;
/** Constructor.
* This class is built up piecemeal, so the constructor records only the basic limits.
*@param n is the maximum number of full job descriptions we want at this time.
*@param prioritizationTime is the cutoff time for documents eligible for prioritization.
*/
public ThrottleLimit(int n, long prioritizationTime)
{
this.n = n;
this.prioritizationTime = prioritizationTime;
Logging.perf.debug("Limit instance created");
}
/** Transfer blocking documents discovered to BlockingDocuments object */
public void tallyBlockingDocuments(BlockingDocuments blockingDocuments)
{
int i = 0;
while (i < blockingDocumentArray.size())
{
DocumentDescription dd = (DocumentDescription)blockingDocumentArray.get(i++);
blockingDocuments.addBlockingDocument(dd);
}
blockingDocumentArray.clear();
}
/** Add a job/connection name map entry.
*@param jobID is the job id.
*@param connectionName is the connection name.
*/
public void addJob(Long jobID, String connectionName)
{
jobConnection.put(jobID,connectionName);
}
/** Add an active connection. This is the pool of active connections that will be used for the lifetime of this operation.
*@param connectionName is the connection name.
*/
public void addConnectionName(String connectionName, IRepositoryConnector connectorInstance)
throws ManifoldCFException
{
activeConnections.put(connectionName,connectorInstance);
int setSize = connectorInstance.getMaxDocumentRequest();
setSizes.put(connectionName,new Integer(setSize));
if (setSize > maxSetSize)
maxSetSize = setSize;
}
/** Add a document limit for a specified connection. This is the limit across all matching bins; if any
* individual matching bin exceeds that limit, then documents that belong to that bin will be excluded.
*@param connectionName is the connection name.
*@param regexp is the regular expression, which we will match against various bins.
*@param upperLimit is the maximum count associated with the specified job.
*/
public void addLimit(String connectionName, String regexp, int upperLimit)
{
if (Logging.perf.isDebugEnabled())
Logging.perf.debug(" Adding fetch limit of "+Integer.toString(upperLimit)+" fetches for expression '"+regexp+"' for connection '"+connectionName+"'");
ThrottleJobItem ji = (ThrottleJobItem)connectionMap.get(connectionName);
if (ji == null)
{
ji = new ThrottleJobItem();
connectionMap.put(connectionName,ji);
}
ji.addLimit(regexp,upperLimit);
}
/** Set a connection-based total document limit.
*/
public void setConnectionLimit(String connectionName, int maxDocuments)
{
if (Logging.perf.isDebugEnabled())
Logging.perf.debug(" Setting connection limit of "+Integer.toString(maxDocuments)+" for connection "+connectionName);
maxConnectionCounts.put(connectionName,new MutableInteger(maxDocuments));
}
/** See if this class can be legitimately compared against another of
* the same type.
*@return true if comparisons will ever return "true".
*/
public boolean doesCompareWork()
{
return false;
}
/** Create a duplicate of this class instance. All current state should be preserved.
* NOTE: Since doesCompareWork() returns false, queries using this limit checker cannot
* be cached, and therefore duplicate() is never called from the query executor. But it can
* be called from other places.
*@return the duplicate.
*/
public ILimitChecker duplicate()
{
return makeDeepCopy();
}
/** Make a deep copy */
public ThrottleLimit makeDeepCopy()
{
ThrottleLimit rval = new ThrottleLimit(n,prioritizationTime);
// Create a true copy of all the structures in which counts are kept. The referential structures (e.g. connection hashes)
// do not need a deep copy.
rval.activeConnections = activeConnections;
rval.setSizes = setSizes;
rval.maxConnectionCounts = maxConnectionCounts;
rval.maxSetSize = maxSetSize;
rval.jobConnection = jobConnection;
// The structures where counts are maintained DO need a deep copy.
rval.documentsProcessed = documentsProcessed;
Iterator iter;
iter = connectionMap.keySet().iterator();
while (iter.hasNext())
{
Object key = iter.next();
rval.connectionMap.put(key,((ThrottleJobItem)connectionMap.get(key)).duplicate());
}
iter = jobQueueHash.keySet().iterator();
while (iter.hasNext())
{
Object key = iter.next();
rval.jobQueueHash.put(key,((QueueHashItem)jobQueueHash.get(key)).duplicate());
}
return rval;
}
/** Find the hashcode for this class. This will only ever be used if
* doesCompareWork() returns true.
*@return the hashcode.
*/
public int hashCode()
{
return 0;
}
/** Compare two objects and see if equal. This will only ever be used
* if doesCompareWork() returns true.
*@param object is the object to compare against.
*@return true if equal.
*/
public boolean equals(Object object)
{
return false;
}
/** Get the remaining documents we should query for.
*@return the maximal remaining count.
*/
public int getRemainingDocuments()
{
return EXTRA_FACTOR * n * maxSetSize - documentsProcessed;
}
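/* Informal worked example (EXTRA_FACTOR is defined elsewhere in this class; the value used here is
* assumed purely for illustration): with EXTRA_FACTOR = 2, n = 10 chunks wanted, and a largest
* connector request size of maxSetSize = 5, up to 2 * 10 * 5 = 100 rows may be examined, and
* getRemainingDocuments() counts down from 100 as documentsProcessed grows.
*/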
/** See if a result row should be included in the final result set.
*@param row is the result row to check.
*@return true if it should be included, false otherwise.
*/
public boolean checkInclude(IResultRow row)
throws ManifoldCFException
{
// Note: This method does two things. First, it ensures that the number of documents per job per bin does
// not exceed the calculated throttle number. Second, it keeps track of how many document queue items
// will be needed, so we can stop when we've got enough for the moment.
Logging.perf.debug("Checking if row should be included");
// This is the method that does the actual work.
// The row passed in has the following jobqueue columns: idField, jobIDField, docIDField, and statusField.
Long jobIDValue = (Long)row.getValue(JobQueue.jobIDField);
// Get the connection name for this row
String connectionName = (String)jobConnection.get(jobIDValue);
if (connectionName == null)
{
Logging.perf.debug(" Row does not have an eligible job - excluding");
return false;
}
IRepositoryConnector connectorInstance = (IRepositoryConnector)activeConnections.get(connectionName);
if (connectorInstance == null)
{
Logging.perf.debug(" Row does not have an eligible connector instance - excluding");
return false;
}
// Find the connection limit for this document
MutableInteger connectionLimit = (MutableInteger)maxConnectionCounts.get(connectionName);
if (connectionLimit != null)
{
if (connectionLimit.intValue() == 0)
{
Logging.perf.debug(" Row exceeds its connection limit - excluding");
return false;
}
connectionLimit.decrement();
}
// Tally this item in the job queue hash, so we can detect when to stop
QueueHashItem queueItem = (QueueHashItem)jobQueueHash.get(jobIDValue);
if (queueItem == null)
{
// Need to talk to the connector to get a max number of docs per chunk
int maxCount = ((Integer)setSizes.get(connectionName)).intValue();
queueItem = new QueueHashItem(maxCount);
jobQueueHash.put(jobIDValue,queueItem);
}
String docIDHash = (String)row.getValue(JobQueue.docHashField);
String docID = (String)row.getValue(JobQueue.docIDField);
// Figure out what the right bins are, given the data we have.
// This will involve a call to the connector.
String[] binNames = ManifoldCF.calculateBins(connectorInstance,docID);
// Keep the running count, so we can abort without going through the whole set.
documentsProcessed++;
//scanRecord.addBins(binNames);
ThrottleJobItem item = (ThrottleJobItem)connectionMap.get(connectionName);
// If there is no schedule-based throttling on this connection, we're done.
if (item == null)
{
queueItem.addDocument();
Logging.perf.debug(" Row has no throttling - including");
return true;
}
int j = 0;
while (j < binNames.length)
{
if (item.isEmpty(binNames[j]))
{
if (Logging.perf.isDebugEnabled())
Logging.perf.debug(" Bin "+binNames[j]+" has no more available fetches - excluding");
Object o = row.getValue(JobQueue.prioritySetField);
if (o == null || ((Long)o).longValue() <= prioritizationTime)
{
// Need to add a document descriptor based on this row to the blockingDocuments object!
// This will cause it to be reprioritized preferentially, getting it out of the way if it shouldn't
// be there.
Long id = (Long)row.getValue(JobQueue.idField);
Long jobID = (Long)row.getValue(JobQueue.jobIDField);
DocumentDescription dd = new DocumentDescription(id,jobID,docIDHash,docID);
blockingDocumentArray.add(dd);
}
return false;
}
j++;
}
j = 0;
while (j < binNames.length)
{
item.decrement(binNames[j++]);
}
queueItem.addDocument();
Logging.perf.debug(" Including!");
return true;
}
/** See if we should examine another row.
*@return true if we need to keep going, or false if we are done.
*/
public boolean checkContinue()
throws ManifoldCFException
{
if (documentsProcessed >= EXTRA_FACTOR * n * maxSetSize)
return false;
// If the number of chunks exceeds n, we are done
Iterator iter = jobQueueHash.keySet().iterator();
int count = 0;
while (iter.hasNext())
{
Long jobID = (Long)iter.next();
QueueHashItem item = (QueueHashItem)jobQueueHash.get(jobID);
count += item.getChunkCount();
if (count > n)
return false;
}
return true;
}
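/* Informal summary of the two stop conditions above: scanning ends once EXTRA_FACTOR * n * maxSetSize
* rows have been examined, or as soon as the chunks accumulated across all jobs exceed the n we want.
*/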
}
/** This class contains information per job on how many queue items have so far been accumulated.
*/
protected static class QueueHashItem
{
// The number of items per chunk for this job
int itemsPerChunk;
// The number of chunks so far, INCLUDING incomplete chunks
int chunkCount = 0;
// The number of documents in the current incomplete chunk
int currentDocumentCount = 0;
/** Construct.
*@param itemsPerChunk is the number of items per chunk for this job.
*/
public QueueHashItem(int itemsPerChunk)
{
this.itemsPerChunk = itemsPerChunk;
}
/** Duplicate. */
public QueueHashItem duplicate()
{
QueueHashItem rval = new QueueHashItem(itemsPerChunk);
rval.chunkCount = chunkCount;
rval.currentDocumentCount = currentDocumentCount;
return rval;
}
/** Add a document to this job.
*/
public void addDocument()
{
currentDocumentCount++;
if (currentDocumentCount == 1)
chunkCount++;
if (currentDocumentCount == itemsPerChunk)
currentDocumentCount = 0;
}
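/* Informal trace of addDocument() with itemsPerChunk = 3 (illustrative value only):
*   call 1: currentDocumentCount=1, chunkCount=1   (a new chunk is opened)
*   call 2: currentDocumentCount=2, chunkCount=1
*   call 3: currentDocumentCount=0, chunkCount=1   (chunk is full; counter resets)
*   call 4: currentDocumentCount=1, chunkCount=2   (the next chunk is opened)
* Note that chunkCount therefore includes the trailing incomplete chunk, as documented above.
*/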
/** Get the number of chunks.
*@return the number of chunks.
*/
public int getChunkCount()
{
return chunkCount;
}
}
/** This class represents the information stored PER JOB in the throttling structure.
* In this structure, "remaining" counts are kept for each bin. When the bin becomes empty,
* then no more documents that would map to that bin will be returned, for this query.
*
* The way in which the maximum count per bin is determined is not part of this class.
*/
protected static class ThrottleJobItem
{
/** These are the bin limits. This is an array of ThrottleLimitSpec objects. */
protected ArrayList throttleLimits = new ArrayList();
/** This is a map of the bins and their current remaining counts. If an entry doesn't exist, the bin is
* considered to still be at its maximum count, as computed by findMaxCount(). */
protected HashMap binCounts = new HashMap();
/** Constructor. */
public ThrottleJobItem()
{
}
/** Add a bin limit.
*@param regexp is the regular expression describing the bins to which the limit applies.
*@param maxCount is the maximum number of fetches allowed for that bin.
*/
public void addLimit(String regexp, int maxCount)
{
try
{
throttleLimits.add(new ThrottleLimitSpec(regexp,maxCount));
}
catch (PatternSyntaxException e)
{
// Ignore the bad entry; it just won't contribute any throttling.
}
}
/** Create a duplicate of this item.
*@return the duplicate.
*/
public ThrottleJobItem duplicate()
{
ThrottleJobItem rval = new ThrottleJobItem();
rval.throttleLimits = throttleLimits;
Iterator iter = binCounts.keySet().iterator();
while (iter.hasNext())
{
String key = (String)iter.next();
rval.binCounts.put(key,((MutableInteger)binCounts.get(key)).duplicate());
}
return rval;
}
/** Check if the specified bin is empty.
*@param binName is the bin name.
*@return true if empty.
*/
public boolean isEmpty(String binName)
{
MutableInteger value = (MutableInteger)binCounts.get(binName);
int remaining;
if (value == null)
{
int x = findMaxCount(binName);
if (x == -1)
return false;
remaining = x;
}
else
remaining = value.intValue();
return (remaining == 0);
}
/** Decrement specified bin.
*@param binName is the bin name.
*/
public void decrement(String binName)
{
MutableInteger value = (MutableInteger)binCounts.get(binName);
if (value == null)
{
int x = findMaxCount(binName);
if (x == -1)
return;
value = new MutableInteger(x);
binCounts.put(binName,value);
}
value.decrement();
}
/** Given a bin name, find the max value for it using the regexps that are in place.
*@param binName is the bin name.
*@return the max count for that bin, or -1 if infinite.
*/
protected int findMaxCount(String binName)
{
// Each connector generates a set of bins per descriptor, e.g. "", ".com", ".metacarta.com", "foo.metacarta.com"
//
// We want to be able to do a couple of different kinds of things easily. For example, we want to:
// - be able to "turn off" or restrict fetching for a given domain, to a lower value than for other domains
// - be able to control fetch rates of .com, .metacarta.com, and foo.metacarta.com such that we
// can establish a faster rate for .com than for foo.metacarta.com
//
// The standard case is to limit fetch rate for all terminal domains (e.g. foo.metacarta.com) to some number:
// ^[^\.] = 8
//
// To apply an additional limit restriction on a specific domain easily requires that the MINIMUM rate
// value be chosen when more than one regexp match is found:
// ^[^\.] = 8
// ^foo\.metacarta\.com = 4
//
// To apply different rates for different levels:
// ^[^\.] = 8
// ^\.[^\.]*\.[^\.]*$ = 20
// ^\.[^\.]*$ = 40
//
// If the same bin is matched by more than one regexp, I now take the MINIMUM value, since this seems to be
// more what the world wants to do (restrict, rather than increase, fetch rates).
int maxCount = -1;
int i = 0;
while (i < throttleLimits.size())
{
ThrottleLimitSpec spec = (ThrottleLimitSpec)throttleLimits.get(i++);
Pattern p = spec.getRegexp();
Matcher m = p.matcher(binName);
if (m.find())
{
int limit = spec.getMaxCount();
if (maxCount == -1 || limit < maxCount)
maxCount = limit;
}
}
return maxCount;
}
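/* Informal worked example, using the regexps from the comment above (illustrative only):
* with limits ^[^\.] = 8 and ^foo\.metacarta\.com = 4, findMaxCount("foo.metacarta.com")
* matches both expressions and returns the minimum, 4; findMaxCount(".com") matches
* neither (both patterns are anchored at a non-dot start), so it returns -1 (no limit).
*/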
}
/** This is a class which describes an individual throttle limit, in fetches. */
protected static class ThrottleLimitSpec
{
/** Regexp */
protected Pattern regexp;
/** The fetch limit for all bins matching that regexp */
protected int maxCount;
/** Constructor */
public ThrottleLimitSpec(String regexp, int maxCount)
throws PatternSyntaxException
{
this.regexp = Pattern.compile(regexp);
this.maxCount = maxCount;
}
/** Get the regexp. */
public Pattern getRegexp()
{
return regexp;
}
/** Get the max count */
public int getMaxCount()
{
return maxCount;
}
}
/** Mutable integer class.
*/
protected static class MutableInteger
{
int value;
/** Construct.
*/
public MutableInteger(int value)
{
this.value = value;
}
/** Duplicate */
public MutableInteger duplicate()
{
return new MutableInteger(value);
}
/** Decrement.
*/
public void decrement()
{
value--;
}
/** Increment.
*/
public void increment()
{
value++;
}
/** Get value.
*/
public int intValue()
{
return value;
}
}
}
|
diff --git a/api/src/main/java/com/cloudbees/sdk/UserConfiguration.java b/api/src/main/java/com/cloudbees/sdk/UserConfiguration.java
index f72c276..14e55d0 100644
--- a/api/src/main/java/com/cloudbees/sdk/UserConfiguration.java
+++ b/api/src/main/java/com/cloudbees/sdk/UserConfiguration.java
@@ -1,189 +1,191 @@
package com.cloudbees.sdk;
import com.cloudbees.api.AccountInfo;
import com.cloudbees.api.AccountKeysResponse;
import com.cloudbees.api.AccountListResponse;
import com.cloudbees.api.BeesClient;
import com.cloudbees.api.BeesClientConfiguration;
import com.cloudbees.api.BeesClientException;
import com.cloudbees.sdk.cli.DirectoryStructure;
import com.cloudbees.sdk.cli.Verbose;
import com.cloudbees.sdk.utils.Helper;
import com.cloudbees.sdk.utils.PasswordHelper;
import javax.inject.Inject;
import java.io.File;
import java.io.FileOutputStream;
import java.util.List;
import java.util.Map;
import java.util.Properties;
/**
* Injectable component that encapsulates the user configuration
* and its persistence to {@code ~/.bees/bees.config}
*
* <h2>Parameters vs Config properties</h2>
* <p>
* For historical reasons, the code distinguishes "config properties" and "parameters."
* Both are string->string key/value pairs, and both capture various aspects of how
* we talk to the CloudBees backend, but for reasons beyond me, they use different key names
* to represent the same thing. For example, "config properties" would have "bees.api.key" for
* the API key, whereas "parameters" would use "key".
*
* <p>
* "config properties" are tied to the persisted {@code ~/.bees/bees.config}, whereas
* parameters appear to be transient within one invocation.
*
* <p>
* There are no constants defined for any of these keys.
*
* <p>
* Most likely this is unintended technical debt accumulated over time, but for the time being
* I'm not touching it. I recommend unifying them to consistently use "config properties" key
* names.
*
* @author Kohsuke Kawaguchi
*/
public class UserConfiguration {
@Inject
DirectoryStructure directoryStructure;
@Inject
Verbose verbose;
/**
* Loads the configuration, creating it if necessary.
*
* @param credentialType
* either EMAIL_CREDENTIALS or KEYS_CREDENTIALS, used only if we need to create the configuration.
* @param parameters
* used only if we need to create the configuration.
*/
public Properties load(int credentialType, Map<String,String> parameters) {
File userConfigFile = getConfigFile();
Properties properties = new Properties();
properties.setProperty("bees.api.url.us", "https://api.cloudbees.com/api");
properties.setProperty("bees.api.url.eu", "https://api-eu.cloudbees.com/api");
if (!Helper.loadProperties(userConfigFile, properties)) {
properties = create(credentialType, parameters);
}
return properties;
}
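// Usage sketch (hypothetical caller, not taken from this codebase):
//   Properties props = userConfiguration.load(UserConfiguration.EMAIL_CREDENTIALS,
//       java.util.Collections.<String, String>emptyMap());
//   String apiUrl = props.getProperty("bees.api.url");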
public File getConfigFile() {
return new File(directoryStructure.localRepository, "bees.config");
}
/**
* Creates a new configuration file.
*/
public Properties create(int credentialType, Map<String, String> paramaters) {
Properties properties = new Properties();
+ properties.setProperty("bees.api.url.us", "https://api.cloudbees.com/api");
+ properties.setProperty("bees.api.url.eu", "https://api-eu.cloudbees.com/api");
System.out.println();
System.out.println("You have not created a CloudBees configuration profile, let's create one now...");
try {
String endPoint = paramaters.get("endPoint");
/*
while (endPoint == null || endPoint.equalsIgnoreCase("us") || endPoint.equalsIgnoreCase("eu")) {
endPoint = Helper.promptFor("Enter your default CloudBees API end point [us | eu]: ", true);
}
*/
if (endPoint == null) endPoint = "us";
String server = paramaters.get("server");
if (server == null) server = properties.getProperty("bees.api.url." + endPoint);
properties.setProperty("bees.api.url", server);
String key = paramaters.get("key");
String secret = paramaters.get("secret");
String domain = paramaters.get("domain");
if (key == null || secret == null) {
if (credentialType == KEYS_CREDENTIALS) {
System.out.println("Go to https://grandcentral.cloudbees.com/user/keys to retrieve your API key");
System.out.println();
} else if (credentialType == EMAIL_CREDENTIALS) {
String email = paramaters.get("email");
if (email == null)
email = Helper.promptFor("Enter your CloudBees account email address: ", true);
String password = paramaters.get("password");
if (password == null) {
password = PasswordHelper.prompt("Enter your CloudBees account password: ");
}
// Get the API key & secret
BeesClientConfiguration beesClientConfiguration = new BeesClientConfiguration(server, "1", "0", "xml", "1.0");
// Set proxy information
beesClientConfiguration.setProxyHost(paramaters.get("proxy.host"));
if (paramaters.get("proxy.port") != null)
beesClientConfiguration.setProxyPort(Integer.parseInt(paramaters.get("proxy.port")));
beesClientConfiguration.setProxyUser(paramaters.get("proxy.user"));
beesClientConfiguration.setProxyPassword(paramaters.get("proxy.password"));
BeesClient staxClient = new BeesClient(beesClientConfiguration);
staxClient.setVerbose(verbose.isVerbose());
AccountKeysResponse response = staxClient.accountKeys(domain, email, password);
key = response.getKey();
secret = response.getSecret();
// Get the default account name
beesClientConfiguration.setApiKey(key);
beesClientConfiguration.setSecret(secret);
staxClient = new BeesClient(beesClientConfiguration);
staxClient.setVerbose(verbose.isVerbose());
AccountListResponse listResponse = staxClient.accountList();
List<AccountInfo> accounts = listResponse.getAccounts();
if (accounts.size() == 1) {
domain = accounts.get(0).getName();
} else {
String accountsString = null;
for (AccountInfo info: accounts) {
if (accountsString == null)
accountsString = info.getName();
else
accountsString += "," + info.getName();
}
System.out.println("You have several accounts: " + accountsString);
domain = Helper.promptFor("Enter your default CloudBees account name : ", true);
}
}
}
if (key == null) key = Helper.promptFor("Enter your CloudBees API key: ", true);
if (secret == null) secret = Helper.promptFor("Enter your CloudBees secret: ", true);
if (domain == null) domain = Helper.promptFor("Enter your default CloudBees account name: ", true);
properties.setProperty("bees.api.key", key);
properties.setProperty("bees.api.secret", secret);
properties.setProperty("bees.project.app.domain", domain);
if (paramaters.get("proxy.host") != null)
properties.setProperty("bees.api.proxy.host", paramaters.get("proxy.host"));
if (paramaters.get("proxy.port") != null)
properties.setProperty("bees.api.proxy.port", paramaters.get("proxy.port"));
if (paramaters.get("proxy.user") != null)
properties.setProperty("bees.api.proxy.user", paramaters.get("proxy.user"));
if (paramaters.get("proxy.password") != null)
properties.setProperty("bees.api.proxy.password", paramaters.get("proxy.password"));
getConfigFile().getParentFile().mkdirs();
FileOutputStream fos = new FileOutputStream(getConfigFile());
properties.store(fos, "CloudBees SDK config");
fos.close();
return properties;
} catch (BeesClientException e) {
String errCode = e.getError().getErrorCode();
if (errCode != null && errCode.equals("AuthFailure"))
throw new BeesSecurityException("Authentication failure, please check credentials!", e);
else
throw new RuntimeException(e.getMessage(), e);
} catch (Exception e) {
throw new RuntimeException("Cannot create configuration", e);
}
}
public static int EMAIL_CREDENTIALS = 0;
public static int KEYS_CREDENTIALS = 1;
}
| true | true | public Properties create(int credentialType, Map<String, String> paramaters) {
Properties properties = new Properties();
System.out.println();
System.out.println("You have not created a CloudBees configuration profile, let's create one now...");
try {
String endPoint = paramaters.get("endPoint");
/*
while (endPoint == null || endPoint.equalsIgnoreCase("us") || endPoint.equalsIgnoreCase("eu")) {
endPoint = Helper.promptFor("Enter your default CloudBees API end point [us | eu]: ", true);
}
*/
if (endPoint == null) endPoint = "us";
String server = paramaters.get("server");
if (server == null) server = properties.getProperty("bees.api.url." + endPoint);
properties.setProperty("bees.api.url", server);
String key = paramaters.get("key");
String secret = paramaters.get("secret");
String domain = paramaters.get("domain");
if (key == null || secret == null) {
if (credentialType == KEYS_CREDENTIALS) {
System.out.println("Go to https://grandcentral.cloudbees.com/user/keys to retrieve your API key");
System.out.println();
} else if (credentialType == EMAIL_CREDENTIALS) {
String email = paramaters.get("email");
if (email == null)
email = Helper.promptFor("Enter your CloudBees account email address: ", true);
String password = paramaters.get("password");
if (password == null) {
password = PasswordHelper.prompt("Enter your CloudBees account password: ");
}
// Get the API key & secret
BeesClientConfiguration beesClientConfiguration = new BeesClientConfiguration(server, "1", "0", "xml", "1.0");
// Set proxy information
beesClientConfiguration.setProxyHost(paramaters.get("proxy.host"));
if (paramaters.get("proxy.port") != null)
beesClientConfiguration.setProxyPort(Integer.parseInt(paramaters.get("proxy.port")));
beesClientConfiguration.setProxyUser(paramaters.get("proxy.user"));
beesClientConfiguration.setProxyPassword(paramaters.get("proxy.password"));
BeesClient staxClient = new BeesClient(beesClientConfiguration);
staxClient.setVerbose(verbose.isVerbose());
AccountKeysResponse response = staxClient.accountKeys(domain, email, password);
key = response.getKey();
secret = response.getSecret();
// Get the default account name
beesClientConfiguration.setApiKey(key);
beesClientConfiguration.setSecret(secret);
staxClient = new BeesClient(beesClientConfiguration);
staxClient.setVerbose(verbose.isVerbose());
AccountListResponse listResponse = staxClient.accountList();
List<AccountInfo> accounts = listResponse.getAccounts();
if (accounts.size() == 1) {
domain = accounts.get(0).getName();
} else {
String accountsString = null;
for (AccountInfo info: accounts) {
if (accountsString == null)
accountsString = info.getName();
else
accountsString += "," + info.getName();
}
System.out.println("You have several accounts: " + accountsString);
domain = Helper.promptFor("Enter your default CloudBees account name : ", true);
}
}
}
if (key == null) key = Helper.promptFor("Enter your CloudBees API key: ", true);
if (secret == null) secret = Helper.promptFor("Enter your CloudBees secret: ", true);
if (domain == null) domain = Helper.promptFor("Enter your default CloudBees account name: ", true);
properties.setProperty("bees.api.key", key);
properties.setProperty("bees.api.secret", secret);
properties.setProperty("bees.project.app.domain", domain);
if (paramaters.get("proxy.host") != null)
properties.setProperty("bees.api.proxy.host", paramaters.get("proxy.host"));
if (paramaters.get("proxy.port") != null)
properties.setProperty("bees.api.proxy.port", paramaters.get("proxy.port"));
if (paramaters.get("proxy.user") != null)
properties.setProperty("bees.api.proxy.user", paramaters.get("proxy.user"));
if (paramaters.get("proxy.password") != null)
properties.setProperty("bees.api.proxy.password", paramaters.get("proxy.password"));
getConfigFile().getParentFile().mkdirs();
FileOutputStream fos = new FileOutputStream(getConfigFile());
properties.store(fos, "CloudBees SDK config");
fos.close();
return properties;
} catch (BeesClientException e) {
String errCode = e.getError().getErrorCode();
if (errCode != null && errCode.equals("AuthFailure"))
throw new BeesSecurityException("Authentication failure, please check credentials!", e);
else
throw new RuntimeException(e.getMessage(), e);
} catch (Exception e) {
throw new RuntimeException("Cannot create configuration", e);
}
}
| public Properties create(int credentialType, Map<String, String> paramaters) {
Properties properties = new Properties();
properties.setProperty("bees.api.url.us", "https://api.cloudbees.com/api");
properties.setProperty("bees.api.url.eu", "https://api-eu.cloudbees.com/api");
System.out.println();
System.out.println("You have not created a CloudBees configuration profile, let's create one now...");
try {
String endPoint = paramaters.get("endPoint");
/*
while (endPoint == null || endPoint.equalsIgnoreCase("us") || endPoint.equalsIgnoreCase("eu")) {
endPoint = Helper.promptFor("Enter your default CloudBees API end point [us | eu]: ", true);
}
*/
if (endPoint == null) endPoint = "us";
String server = paramaters.get("server");
if (server == null) server = properties.getProperty("bees.api.url." + endPoint);
properties.setProperty("bees.api.url", server);
String key = paramaters.get("key");
String secret = paramaters.get("secret");
String domain = paramaters.get("domain");
if (key == null || secret == null) {
if (credentialType == KEYS_CREDENTIALS) {
System.out.println("Go to https://grandcentral.cloudbees.com/user/keys to retrieve your API key");
System.out.println();
} else if (credentialType == EMAIL_CREDENTIALS) {
String email = paramaters.get("email");
if (email == null)
email = Helper.promptFor("Enter your CloudBees account email address: ", true);
String password = paramaters.get("password");
if (password == null) {
password = PasswordHelper.prompt("Enter your CloudBees account password: ");
}
// Get the API key & secret
BeesClientConfiguration beesClientConfiguration = new BeesClientConfiguration(server, "1", "0", "xml", "1.0");
// Set proxy information
beesClientConfiguration.setProxyHost(paramaters.get("proxy.host"));
if (paramaters.get("proxy.port") != null)
beesClientConfiguration.setProxyPort(Integer.parseInt(paramaters.get("proxy.port")));
beesClientConfiguration.setProxyUser(paramaters.get("proxy.user"));
beesClientConfiguration.setProxyPassword(paramaters.get("proxy.password"));
BeesClient staxClient = new BeesClient(beesClientConfiguration);
staxClient.setVerbose(verbose.isVerbose());
AccountKeysResponse response = staxClient.accountKeys(domain, email, password);
key = response.getKey();
secret = response.getSecret();
// Get the default account name
beesClientConfiguration.setApiKey(key);
beesClientConfiguration.setSecret(secret);
staxClient = new BeesClient(beesClientConfiguration);
staxClient.setVerbose(verbose.isVerbose());
AccountListResponse listResponse = staxClient.accountList();
List<AccountInfo> accounts = listResponse.getAccounts();
if (accounts.size() == 1) {
domain = accounts.get(0).getName();
} else {
String accountsString = null;
for (AccountInfo info: accounts) {
if (accountsString == null)
accountsString = info.getName();
else
accountsString += "," + info.getName();
}
System.out.println("You have several accounts: " + accountsString);
domain = Helper.promptFor("Enter your default CloudBees account name : ", true);
}
}
}
if (key == null) key = Helper.promptFor("Enter your CloudBees API key: ", true);
if (secret == null) secret = Helper.promptFor("Enter your CloudBees secret: ", true);
if (domain == null) domain = Helper.promptFor("Enter your default CloudBees account name: ", true);
properties.setProperty("bees.api.key", key);
properties.setProperty("bees.api.secret", secret);
properties.setProperty("bees.project.app.domain", domain);
if (paramaters.get("proxy.host") != null)
properties.setProperty("bees.api.proxy.host", paramaters.get("proxy.host"));
if (paramaters.get("proxy.port") != null)
properties.setProperty("bees.api.proxy.port", paramaters.get("proxy.port"));
if (paramaters.get("proxy.user") != null)
properties.setProperty("bees.api.proxy.user", paramaters.get("proxy.user"));
if (paramaters.get("proxy.password") != null)
properties.setProperty("bees.api.proxy.password", paramaters.get("proxy.password"));
getConfigFile().getParentFile().mkdirs();
FileOutputStream fos = new FileOutputStream(getConfigFile());
properties.store(fos, "CloudBees SDK config");
fos.close();
return properties;
} catch (BeesClientException e) {
String errCode = e.getError().getErrorCode();
if (errCode != null && errCode.equals("AuthFailure"))
throw new BeesSecurityException("Authentication failure, please check credentials!", e);
else
throw new RuntimeException(e.getMessage(), e);
} catch (Exception e) {
throw new RuntimeException("Cannot create configuration", e);
}
}
|
diff --git a/plugins/org.eclipse.birt.data/src/org/eclipse/birt/data/engine/impl/index/SerializableDataSetNumberIndex.java b/plugins/org.eclipse.birt.data/src/org/eclipse/birt/data/engine/impl/index/SerializableDataSetNumberIndex.java
index bf2b2113d..aad143959 100644
--- a/plugins/org.eclipse.birt.data/src/org/eclipse/birt/data/engine/impl/index/SerializableDataSetNumberIndex.java
+++ b/plugins/org.eclipse.birt.data/src/org/eclipse/birt/data/engine/impl/index/SerializableDataSetNumberIndex.java
@@ -1,131 +1,132 @@
/*******************************************************************************
* Copyright (c) 2004, 2010 Actuate Corporation.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* Actuate Corporation - initial API and implementation
*******************************************************************************/
package org.eclipse.birt.data.engine.impl.index;
import java.io.DataOutputStream;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import org.eclipse.birt.core.archive.RAOutputStream;
import org.eclipse.birt.core.util.IOUtil;
import org.eclipse.birt.data.engine.core.DataException;
import org.eclipse.birt.data.engine.script.ScriptEvalUtil;
public class SerializableDataSetNumberIndex<T> implements IIndexSerializer
{
private static int BLOCKNUMBER = 5000;
private Map<T, List<Integer>> numberAndIndex = new HashMap<T, List<Integer>>( );
private RAOutputStream output;
public SerializableDataSetNumberIndex( RAOutputStream stream )
{
this.output = stream;
}
/**
* Put a value into the index, appending the row index to the list kept for that value
*
* @param number the value being indexed
* @param index the row index to record for that value
* @throws DataException
*/
public Object put( Object number, Object index ) throws DataException
{
if ( this.numberAndIndex.containsKey( number ) )
{
this.numberAndIndex.get( number ).add( (Integer)index );
}
else
{
List<Integer> list = new ArrayList<Integer>( );
list.add( (Integer) index );
this.numberAndIndex.put( (T)number, list );
}
return null;
}
public void close( ) throws DataException
{
try
{
List<T> keyList = new LinkedList<T>( );
keyList.addAll( this.numberAndIndex.keySet( ) );
if ( keyList.size( ) == 0 )
{
+ IOUtil.writeInt( output, 0 );
output.close( );
return;
}
Collections.sort( keyList, new NumberComparator<T>( ) );
int segs = ( keyList.size( ) - 1 ) / BLOCKNUMBER + 1;
IOUtil.writeInt( output, segs );
long intOffset = output.getOffset( );
DataOutputStream dout = new DataOutputStream( output );
long[] offsets = new long[segs];
Object[] boundaryValues = new Object[segs];
for ( int i = 0; i < segs; i++ )
{
IOUtil.writeLong( dout, 0 );
boundaryValues[i] = keyList.get( i * BLOCKNUMBER );
}
for ( int i = 0; i < boundaryValues.length; i++ )
{
IOUtil.writeObject( dout, boundaryValues[i] );
}
for ( int i = 0; i < segs; i++ )
{
offsets[i] = output.getOffset( );
IOUtil.writeInt( dout, i == segs - 1? keyList.size( )%BLOCKNUMBER : BLOCKNUMBER );
for ( int j = i * BLOCKNUMBER; j < ( i + 1 ) * BLOCKNUMBER && j < keyList.size( ); j++ )
{
IOUtil.writeObject( dout, keyList.get( j ) );
IOUtil.writeList( dout,
numberAndIndex.get( keyList.get( j ) ) );
}
}
// Seek to the offset recording location;
output.seek( intOffset );
for ( int i = 0; i < offsets.length; i++ )
{
IOUtil.writeLong( dout, offsets[i] );
}
output.close( );
}
catch ( Exception e )
{
throw new DataException( e.getLocalizedMessage( ), e );
}
}
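/* Sketch of the on-disk layout produced by close() above, reconstructed informally from the
* code (not authoritative documentation):
*   [int segs]                        -- number of index segments (0 for an empty index)
*   [long offset] x segs              -- placeholders, back-patched via output.seek()
*   [Object boundaryValue] x segs     -- the first (smallest) key of each segment
*   then, per segment: [int count] followed by count x ([Object key][List rowIndexes])
*/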
private class NumberComparator<T1> implements Comparator<T>
{
public int compare( T o1, T o2 )
{
try
{
return ScriptEvalUtil.compare( o1, o2 );
}
catch ( DataException e )
{
throw new RuntimeException( e );
}
}
}
}
| true | true | public void close( ) throws DataException
{
try
{
List<T> keyList = new LinkedList<T>( );
keyList.addAll( this.numberAndIndex.keySet( ) );
if ( keyList.size( ) == 0 )
{
output.close( );
return;
}
Collections.sort( keyList, new NumberComparator<T>( ) );
int segs = ( keyList.size( ) - 1 ) / BLOCKNUMBER + 1;
IOUtil.writeInt( output, segs );
long intOffset = output.getOffset( );
DataOutputStream dout = new DataOutputStream( output );
long[] offsets = new long[segs];
Object[] boundaryValues = new Object[segs];
for ( int i = 0; i < segs; i++ )
{
IOUtil.writeLong( dout, 0 );
boundaryValues[i] = keyList.get( i * BLOCKNUMBER );
}
for ( int i = 0; i < boundaryValues.length; i++ )
{
IOUtil.writeObject( dout, boundaryValues[i] );
}
for ( int i = 0; i < segs; i++ )
{
offsets[i] = output.getOffset( );
IOUtil.writeInt( dout, i == segs - 1? keyList.size( )%BLOCKNUMBER : BLOCKNUMBER );
for ( int j = i * BLOCKNUMBER; j < ( i + 1 ) * BLOCKNUMBER && j < keyList.size( ); j++ )
{
IOUtil.writeObject( dout, keyList.get( j ) );
IOUtil.writeList( dout,
numberAndIndex.get( keyList.get( j ) ) );
}
}
// Seek to the offset recording location;
output.seek( intOffset );
for ( int i = 0; i < offsets.length; i++ )
{
IOUtil.writeLong( dout, offsets[i] );
}
output.close( );
}
catch ( Exception e )
{
throw new DataException( e.getLocalizedMessage( ), e );
}
}
| public void close( ) throws DataException
{
try
{
List<T> keyList = new LinkedList<T>( );
keyList.addAll( this.numberAndIndex.keySet( ) );
if ( keyList.size( ) == 0 )
{
IOUtil.writeInt( output, 0 );
output.close( );
return;
}
Collections.sort( keyList, new NumberComparator<T>( ) );
int segs = ( keyList.size( ) - 1 ) / BLOCKNUMBER + 1;
IOUtil.writeInt( output, segs );
long intOffset = output.getOffset( );
DataOutputStream dout = new DataOutputStream( output );
long[] offsets = new long[segs];
Object[] boundaryValues = new Object[segs];
for ( int i = 0; i < segs; i++ )
{
IOUtil.writeLong( dout, 0 );
boundaryValues[i] = keyList.get( i * BLOCKNUMBER );
}
for ( int i = 0; i < boundaryValues.length; i++ )
{
IOUtil.writeObject( dout, boundaryValues[i] );
}
for ( int i = 0; i < segs; i++ )
{
offsets[i] = output.getOffset( );
IOUtil.writeInt( dout, i == segs - 1? keyList.size( )%BLOCKNUMBER : BLOCKNUMBER );
for ( int j = i * BLOCKNUMBER; j < ( i + 1 ) * BLOCKNUMBER && j < keyList.size( ); j++ )
{
IOUtil.writeObject( dout, keyList.get( j ) );
IOUtil.writeList( dout,
numberAndIndex.get( keyList.get( j ) ) );
}
}
// Seek to the offset recording location;
output.seek( intOffset );
for ( int i = 0; i < offsets.length; i++ )
{
IOUtil.writeLong( dout, offsets[i] );
}
output.close( );
}
catch ( Exception e )
{
throw new DataException( e.getLocalizedMessage( ), e );
}
}
|
diff --git a/ide/eclipse/esb/org.wso2.developerstudio.eclipse.gmf.esb.diagram/src/org/wso2/developerstudio/eclipse/gmf/esb/diagram/edit/parts/SendMediatorEditPart.java b/ide/eclipse/esb/org.wso2.developerstudio.eclipse.gmf.esb.diagram/src/org/wso2/developerstudio/eclipse/gmf/esb/diagram/edit/parts/SendMediatorEditPart.java
index 2989dc057..83a840606 100644
--- a/ide/eclipse/esb/org.wso2.developerstudio.eclipse.gmf.esb.diagram/src/org/wso2/developerstudio/eclipse/gmf/esb/diagram/edit/parts/SendMediatorEditPart.java
+++ b/ide/eclipse/esb/org.wso2.developerstudio.eclipse.gmf.esb.diagram/src/org/wso2/developerstudio/eclipse/gmf/esb/diagram/edit/parts/SendMediatorEditPart.java
@@ -1,500 +1,501 @@
package org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts;
import java.util.Iterator;
import org.eclipse.core.resources.IFile;
import org.eclipse.draw2d.GridData;
import org.eclipse.draw2d.IFigure;
import org.eclipse.draw2d.Label;
import org.eclipse.draw2d.PositionConstants;
import org.eclipse.draw2d.RoundedRectangle;
import org.eclipse.draw2d.Shape;
import org.eclipse.draw2d.StackLayout;
import org.eclipse.draw2d.ToolbarLayout;
import org.eclipse.draw2d.geometry.Dimension;
import org.eclipse.emf.common.notify.Notification;
import org.eclipse.emf.ecore.EAttribute;
import org.eclipse.emf.ecore.EReference;
import org.eclipse.gef.EditPart;
import org.eclipse.gef.EditPolicy;
import org.eclipse.gef.Request;
import org.eclipse.gef.commands.Command;
import org.eclipse.gef.editpolicies.LayoutEditPolicy;
import org.eclipse.gef.editpolicies.NonResizableEditPolicy;
import org.eclipse.gef.requests.CreateRequest;
import org.eclipse.gmf.runtime.diagram.ui.commands.ICommandProxy;
import org.eclipse.gmf.runtime.diagram.ui.editparts.IBorderItemEditPart;
import org.eclipse.gmf.runtime.diagram.ui.editparts.IGraphicalEditPart;
import org.eclipse.gmf.runtime.diagram.ui.editpolicies.BorderItemSelectionEditPolicy;
import org.eclipse.gmf.runtime.diagram.ui.editpolicies.CreationEditPolicy;
import org.eclipse.gmf.runtime.diagram.ui.editpolicies.DragDropEditPolicy;
import org.eclipse.gmf.runtime.diagram.ui.editpolicies.EditPolicyRoles;
import org.eclipse.gmf.runtime.diagram.ui.figures.BorderItemLocator;
import org.eclipse.gmf.runtime.draw2d.ui.figures.ConstrainedToolbarLayout;
import org.eclipse.gmf.runtime.draw2d.ui.figures.WrappingLabel;
import org.eclipse.gmf.runtime.emf.type.core.commands.SetValueCommand;
import org.eclipse.gmf.runtime.emf.type.core.requests.SetRequest;
import org.eclipse.gmf.runtime.gef.ui.figures.DefaultSizeNodeFigure;
import org.eclipse.gmf.runtime.gef.ui.figures.NodeFigure;
import org.eclipse.gmf.runtime.notation.View;
import org.eclipse.swt.SWT;
import org.eclipse.swt.graphics.Color;
import org.eclipse.ui.IEditorPart;
import org.eclipse.ui.IFileEditorInput;
import org.eclipse.ui.PlatformUI;
import org.wso2.developerstudio.eclipse.gmf.esb.EsbPackage;
import org.wso2.developerstudio.eclipse.gmf.esb.RegistryKeyProperty;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.custom.AbstractEndpoint;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.custom.AbstractMediator;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.custom.EsbGraphicalShape;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.custom.EsbGraphicalShapeWithLabel;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.custom.EsbGroupingShape;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.custom.FixedBorderItemLocator;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.custom.FixedSizedAbstractMediator;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.custom.SendMediatorGraphicalShape;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.custom.ShowPropertyViewEditPolicy;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.custom.SingleCompartmentComplexFiguredAbstractMediator;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.custom.editpolicy.FeedbackIndicateDragDropEditPolicy;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.custom.utils.ElementDuplicator;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.custom.utils.MediatorFigureReverser;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.policies.SendMediatorCanonicalEditPolicy;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.policies.SendMediatorItemSemanticEditPolicy;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.part.EsbMultiPageEditor;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.part.EsbVisualIDRegistry;
import static org.wso2.developerstudio.eclipse.gmf.esb.EsbPackage.Literals.END_POINT__IN_LINE;
/**
* @generated NOT
*/
public class SendMediatorEditPart extends SingleCompartmentComplexFiguredAbstractMediator {
public IFigure endpointOutputConnector;
/**
* @generated
*/
public static final int VISUAL_ID = 3515;
/**
* @generated
*/
protected IFigure contentPane;
/**
* @generated
*/
public SendMediatorEditPart(View view) {
super(view);
}
/**
* @generated NOT
*/
protected void createDefaultEditPolicies() {
installEditPolicy(EditPolicyRoles.CREATION_ROLE, new CreationEditPolicy());
super.createDefaultEditPolicies();
installEditPolicy(EditPolicyRoles.SEMANTIC_ROLE, new SendMediatorItemSemanticEditPolicy());
installEditPolicy(EditPolicyRoles.DRAG_DROP_ROLE, new DragDropEditPolicy());
installEditPolicy(EditPolicyRoles.DRAG_DROP_ROLE, new FeedbackIndicateDragDropEditPolicy());
installEditPolicy(EditPolicyRoles.CANONICAL_ROLE, new SendMediatorCanonicalEditPolicy());
installEditPolicy(EditPolicy.LAYOUT_ROLE, createLayoutEditPolicy());
// For handle Double click Event.
installEditPolicy(EditPolicyRoles.OPEN_ROLE, new ShowPropertyViewEditPolicy());
// XXX need an SCR to runtime to have another abstract superclass that would let children add reasonable editpolicies
// removeEditPolicy(org.eclipse.gmf.runtime.diagram.ui.editpolicies.EditPolicyRoles.CONNECTION_HANDLES_ROLE);
}
/**
* @generated
*/
protected LayoutEditPolicy createLayoutEditPolicy() {
org.eclipse.gmf.runtime.diagram.ui.editpolicies.LayoutEditPolicy lep = new org.eclipse.gmf.runtime.diagram.ui.editpolicies.LayoutEditPolicy() {
protected EditPolicy createChildEditPolicy(EditPart child) {
View childView = (View) child.getModel();
switch (EsbVisualIDRegistry.getVisualID(childView)) {
case SendMediatorInputConnectorEditPart.VISUAL_ID:
case SendMediatorOutputConnectorEditPart.VISUAL_ID:
return new BorderItemSelectionEditPolicy();
}
EditPolicy result = child.getEditPolicy(EditPolicy.PRIMARY_DRAG_ROLE);
if (result == null) {
result = new NonResizableEditPolicy();
}
return result;
}
protected Command getMoveChildrenCommand(Request request) {
return null;
}
protected Command getCreateCommand(CreateRequest request) {
return null;
}
};
return lep;
}
public void notifyChanged(Notification notification) {
super.notifyChanged(notification);
/*
if(notification.getFeature() instanceof EReference){
if("StaticReceivingSequence".equals(((EReference)notification.getFeature()).getName())){
String oldValue=((RegistryKeyProperty) notification.getOldValue()).getKeyValue();
String newValue=((RegistryKeyProperty) notification.getNewValue()).getKeyValue();
if(!oldValue.equals(newValue)){
IEditorPart activeEditor=PlatformUI.getWorkbench().getActiveWorkbenchWindow().getActivePage().getActiveEditor();
if (activeEditor instanceof EsbMultiPageEditor) {
IFile openedFile = ((IFileEditorInput)activeEditor.getEditorInput()).getFile();
ElementDuplicator endPointDuplicator = new ElementDuplicator(openedFile.getProject(),((EsbMultiPageEditor)activeEditor).getGraphicalEditor());
endPointDuplicator.updateAssociatedDiagrams((EsbMultiPageEditor)activeEditor);
}
}
}
}*/
if (notification.getFeature() instanceof EAttribute) {
if (EsbPackage.eINSTANCE.getSendMediator_SkipSerialization().equals(
notification.getFeature())) {
updateEndpointInlineProperty(notification);
}
}
}
/**
* Updates the inline property of the contained endpoint when the skip-serialization property has changed.
* @param notification the change notification carrying the old and new values
*/
private void updateEndpointInlineProperty(Notification notification) {
if (notification.getNewBooleanValue() != notification.getOldBooleanValue()) {
IGraphicalEditPart mediatorFlow = getChildBySemanticHint(EsbVisualIDRegistry
.getType(MediatorFlow19EditPart.VISUAL_ID));
if (mediatorFlow != null) {
IGraphicalEditPart mediatorFlowCompartment = mediatorFlow
.getChildBySemanticHint(EsbVisualIDRegistry
.getType(MediatorFlowMediatorFlowCompartment19EditPart.VISUAL_ID));
if (mediatorFlowCompartment != null) {
Iterator<?> iterator = mediatorFlowCompartment.getChildren().iterator();
while (iterator.hasNext()) {
Object next = iterator.next();
if (next instanceof AbstractEndpoint) {
SetRequest reqSet = new SetRequest(getEditingDomain(),
((View) ((AbstractEndpoint) next).getModel()).getElement(),
END_POINT__IN_LINE, notification.getNewBooleanValue());
SetValueCommand operation = new SetValueCommand(reqSet);
if (operation.canExecute()) {
getDiagramEditDomain().getDiagramCommandStack().execute(
new ICommandProxy(operation));
}
break;
}
}
}
}
}
}
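// Note: executing the SetRequest through a SetValueCommand on the diagram command stack
// (rather than setting the EMF feature directly) keeps the change transactional and on the
// editor's undo history.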
/**
* @generated NOT
*/
protected IFigure createNodeShape() {
return primaryShape = new SendMediatorFigure() {
public void setBounds(org.eclipse.draw2d.geometry.Rectangle rect) {
super.setBounds(rect);
if ((!connected)
&& (this.getBounds().getLocation().x != 0 && this.getBounds().getLocation().y != 0)) {
connectToMostSuitableElement();
reAllocate(rect);
connected = true;
}
};
};
}
/**
* @generated
*/
public SendMediatorFigure getPrimaryShape() {
return (SendMediatorFigure) primaryShape;
}
protected boolean addFixedChild(EditPart childEditPart) {
if (childEditPart instanceof SendMediatorDescriptionEditPart) {
((SendMediatorDescriptionEditPart) childEditPart).setLabel(getPrimaryShape()
.getFigureSendMediatorPropertyValue());
return true;
}
if (childEditPart instanceof SendMediatorInputConnectorEditPart) {
IFigure borderItemFigure = ((SendMediatorInputConnectorEditPart) childEditPart)
.getFigure();
BorderItemLocator locator = new FixedBorderItemLocator(getMainFigure(),
borderItemFigure, PositionConstants.WEST, 0.5);
getBorderedFigure().getBorderItemContainer().add(borderItemFigure, locator);
return true;
} else if (childEditPart instanceof SendMediatorOutputConnectorEditPart) {
IFigure borderItemFigure = ((SendMediatorOutputConnectorEditPart) childEditPart)
.getFigure();
BorderItemLocator locator = new FixedBorderItemLocator(getMainFigure(),
borderItemFigure, PositionConstants.EAST, 0.5);
getBorderedFigure().getBorderItemContainer().add(borderItemFigure, locator);
+ borderItemFigure.setEnabled(false);
return true;
}
if (childEditPart instanceof SendMediatorEndpointOutputConnectorEditPart) {
endpointOutputConnector = ((SendMediatorEndpointOutputConnectorEditPart) childEditPart)
.getFigure();
endpointOutputConnector.setEnabled(false);
}
/*if (childEditPart instanceof ScriptMediatorScriptLanguageEditPart) {
((ScriptMediatorScriptLanguageEditPart) childEditPart)
.setLabel(getPrimaryShape()
.getFigureScriptMediatorPropertyValue());
return true;
}*/
return false;
}
protected boolean removeFixedChild(EditPart childEditPart) {
if (childEditPart instanceof SendMediatorDescriptionEditPart) {
return true;
}
return false;
}
protected void addChildVisual(EditPart childEditPart, int index) {
if (addFixedChild(childEditPart)) {
return;
}
super.addChildVisual(childEditPart, -1);
}
protected void removeChildVisual(EditPart childEditPart) {
if (removeFixedChild(childEditPart)) {
return;
}
super.removeChildVisual(childEditPart);
}
protected IFigure getContentPaneFor(IGraphicalEditPart editPart) {
if (editPart instanceof IBorderItemEditPart) {
return getBorderedFigure().getBorderItemContainer();
}
return getContentPane();
}
protected NodeFigure createNodePlate() {
DefaultSizeNodeFigure result = new DefaultSizeNodeFigure(40, 40);
return result;
}
/**
* Creates figure for this edit part.
*
* Body of this method does not depend on settings in generation model
* so you may safely remove <i>generated</i> tag and modify it.
*
* @generated
*/
protected NodeFigure createMainFigure() {
NodeFigure figure = createNodePlate();
figure.setLayoutManager(new StackLayout());
IFigure shape = createNodeShape();
figure.add(shape);
contentPane = setupContentPane(shape);
return figure;
}
/**
* Default implementation treats passed figure as content pane.
* Respects layout one may have set for generated figure.
* @param nodeShape instance of generated figure class
* @generated
*/
protected IFigure setupContentPane(IFigure nodeShape) {
if (nodeShape.getLayoutManager() == null) {
ConstrainedToolbarLayout layout = new ConstrainedToolbarLayout();
layout.setSpacing(5);
nodeShape.setLayoutManager(layout);
}
return nodeShape; // use nodeShape itself as contentPane
}
public IFigure getContentPane() {
if (contentPane != null) {
return contentPane;
}
return super.getContentPane();
}
/**
* @generated
*/
protected void setForegroundColor(Color color) {
if (primaryShape != null) {
primaryShape.setForegroundColor(color);
}
}
/**
* @generated
*/
protected void setBackgroundColor(Color color) {
if (primaryShape != null) {
primaryShape.setBackgroundColor(color);
}
}
/**
* @generated
*/
protected void setLineWidth(int width) {
if (primaryShape instanceof Shape) {
((Shape) primaryShape).setLineWidth(width);
}
}
/**
* @generated
*/
protected void setLineType(int style) {
if (primaryShape instanceof Shape) {
((Shape) primaryShape).setLineStyle(style);
}
}
/**
* @generated NOT
*/
//public class SendMediatorFigure extends SendMediatorGraphicalShape {
public class SendMediatorFigure extends EsbGroupingShape {
/**
* @generated
*/
private WrappingLabel fFigureSendMediatorPropertyValue;
private WrappingLabel sendMediatorDescriptionLabel;
/**
* @generated NOT
*/
public SendMediatorFigure() {
/* ToolbarLayout layoutThis = new ToolbarLayout();
layoutThis.setStretchMinorAxis(true);
layoutThis.setMinorAlignment(ToolbarLayout.ALIGN_CENTER);
layoutThis.setSpacing(0);
layoutThis.setVertical(false);
this.setLayoutManager(layoutThis);
this.setPreferredSize(new Dimension(getMapMode().DPtoLP(250),
getMapMode().DPtoLP(100)));
this.setOutline(true);*/
ToolbarLayout layoutThis = new ToolbarLayout();
layoutThis.setStretchMinorAxis(true);
layoutThis.setMinorAlignment(ToolbarLayout.ALIGN_CENTER);
layoutThis.setSpacing(0);
layoutThis.setVertical(false);
this.setLayoutManager(layoutThis);
this.setPreferredSize(new Dimension(getMapMode().DPtoLP(170), getMapMode().DPtoLP(100)));
this.setOutline(false);
this.setBackgroundColor(THIS_BACK);
createContents();
}
public void add(IFigure figure, Object constraint, int index) {
if (figure instanceof DefaultSizeNodeFigure) {
GridData layoutData = new GridData();
layoutData.grabExcessHorizontalSpace = true;
layoutData.grabExcessVerticalSpace = true;
layoutData.horizontalAlignment = GridData.FILL;
layoutData.verticalAlignment = GridData.FILL;
super.add(figure, layoutData, index);
} else if (figure instanceof RoundedRectangle) {
GridData layoutData = new GridData();
layoutData.grabExcessHorizontalSpace = true;
layoutData.grabExcessVerticalSpace = true;
layoutData.horizontalAlignment = GridData.FILL;
layoutData.verticalAlignment = GridData.FILL;
super.add(figure, layoutData, index);
}
else {
super.add(figure, constraint, index);
}
}
/**
* @generated NOT
*/
private void createContents() {
fFigureSendMediatorPropertyValue = new WrappingLabel();
fFigureSendMediatorPropertyValue.setText("<...>");
fFigureSendMediatorPropertyValue.setAlignment(SWT.CENTER);
//this.getPropertyValueRectangle1().add(fFigureSendMediatorPropertyValue);
//sendMediatorDescriptionLabel = getPropertyNameLabel();
}
/**
* @generated
*/
public WrappingLabel getFigureSendMediatorPropertyValue() {
return fFigureSendMediatorPropertyValue;
}
public WrappingLabel getSendMediatorDescriptionLabel() {
return sendMediatorDescriptionLabel;
}
public String getIconPath() {
return "icons/ico20/send-mediator.gif";
}
public String getNodeName() {
return "Send";
}
public IFigure getToolTip() {
return new Label("Send a message out");
}
}
/**
* @generated NOT
*/
static final Color THIS_BACK = new Color(null, 230, 230, 230);
}
| true | true | protected boolean addFixedChild(EditPart childEditPart) {
if (childEditPart instanceof SendMediatorDescriptionEditPart) {
((SendMediatorDescriptionEditPart) childEditPart).setLabel(getPrimaryShape()
.getFigureSendMediatorPropertyValue());
return true;
}
if (childEditPart instanceof SendMediatorInputConnectorEditPart) {
IFigure borderItemFigure = ((SendMediatorInputConnectorEditPart) childEditPart)
.getFigure();
BorderItemLocator locator = new FixedBorderItemLocator(getMainFigure(),
borderItemFigure, PositionConstants.WEST, 0.5);
getBorderedFigure().getBorderItemContainer().add(borderItemFigure, locator);
return true;
} else if (childEditPart instanceof SendMediatorOutputConnectorEditPart) {
IFigure borderItemFigure = ((SendMediatorOutputConnectorEditPart) childEditPart)
.getFigure();
BorderItemLocator locator = new FixedBorderItemLocator(getMainFigure(),
borderItemFigure, PositionConstants.EAST, 0.5);
getBorderedFigure().getBorderItemContainer().add(borderItemFigure, locator);
return true;
}
if (childEditPart instanceof SendMediatorEndpointOutputConnectorEditPart) {
endpointOutputConnector = ((SendMediatorEndpointOutputConnectorEditPart) childEditPart)
.getFigure();
endpointOutputConnector.setEnabled(false);
}
/*if (childEditPart instanceof ScriptMediatorScriptLanguageEditPart) {
((ScriptMediatorScriptLanguageEditPart) childEditPart)
.setLabel(getPrimaryShape()
.getFigureScriptMediatorPropertyValue());
return true;
}*/
| protected boolean addFixedChild(EditPart childEditPart) {
if (childEditPart instanceof SendMediatorDescriptionEditPart) {
((SendMediatorDescriptionEditPart) childEditPart).setLabel(getPrimaryShape()
.getFigureSendMediatorPropertyValue());
return true;
}
if (childEditPart instanceof SendMediatorInputConnectorEditPart) {
IFigure borderItemFigure = ((SendMediatorInputConnectorEditPart) childEditPart)
.getFigure();
BorderItemLocator locator = new FixedBorderItemLocator(getMainFigure(),
borderItemFigure, PositionConstants.WEST, 0.5);
getBorderedFigure().getBorderItemContainer().add(borderItemFigure, locator);
return true;
} else if (childEditPart instanceof SendMediatorOutputConnectorEditPart) {
IFigure borderItemFigure = ((SendMediatorOutputConnectorEditPart) childEditPart)
.getFigure();
BorderItemLocator locator = new FixedBorderItemLocator(getMainFigure(),
borderItemFigure, PositionConstants.EAST, 0.5);
getBorderedFigure().getBorderItemContainer().add(borderItemFigure, locator);
borderItemFigure.setEnabled(false);
return true;
}
if (childEditPart instanceof SendMediatorEndpointOutputConnectorEditPart) {
endpointOutputConnector = ((SendMediatorEndpointOutputConnectorEditPart) childEditPart)
.getFigure();
endpointOutputConnector.setEnabled(false);
}
/*if (childEditPart instanceof ScriptMediatorScriptLanguageEditPart) {
((ScriptMediatorScriptLanguageEditPart) childEditPart)
.setLabel(getPrimaryShape()
.getFigureScriptMediatorPropertyValue());
return true;
}*/
|
diff --git a/src/java/TestFilter.java b/src/java/TestFilter.java
index 936ced5..2584926 100644
--- a/src/java/TestFilter.java
+++ b/src/java/TestFilter.java
@@ -1,116 +1,116 @@
import java.io.BufferedReader;
import java.io.FileNotFoundException;
import java.io.FileReader;
import java.io.IOException;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.SQLException;
import java.sql.Statement;
/**
*
* @author chappljd.
* Created Mar 23, 2013.
*/
public class TestFilter {
private static final String JDBC_DRIVER = "com.mysql.jdbc.Driver";
private static final String DB_URL = "jdbc:mysql://localhost/vcf_analyzer";
static final String USER = "vcf_user";
static final String PASS = "vcf";
public static void main(String[] args) throws Exception {
FilterApplier command = new FilterWriteApplier("2013-04-10_13:54", "", "FilterTest.txt" );
System.out.println( command.execute() );
fileCompare("FilterTest.txt", "FilterPass.txt");
Connection conn = null;
Statement stmt = null;
try {
Class.forName(JDBC_DRIVER);
conn = DriverManager.getConnection(DB_URL, USER, PASS);
stmt = conn.createStatement();
}
catch (Exception e) {
throw new SQLException(e.getMessage());
}
String sql = "DELETE FROM `vcf_analyzer`.`Filter` WHERE `FilName`='testFilterApplier'";
String sql2 = "DELETE FROM `vcf_analyzer`.`Filter` WHERE `FilName`='testFilterApplier2'";
String sql3 = "DELETE FROM `vcf_analyzer`.`Filter` WHERE `FilName`='testFilterApplier3'";
try {
stmt.executeUpdate(sql);
stmt.executeUpdate(sql2);
stmt.executeUpdate(sql3);
} catch (SQLException se) {
throw new SQLException(se.getMessage());
}
String filterName = "testFilterApplier";
String[] operands = {"entry AC=7"};
FilterCreator testCreator = new FilterCreator(filterName, operands);
testCreator.uploadEntries();
//second Test
command = new FilterWriteApplier("2013-04-10_13:54", "testFilterApplier", "FilterTest2.txt" );
System.out.println( command.execute() );
fileCompare("FilterTest2.txt", "FilterPass2.txt");
filterName = "testFilterApplier2";
String[] operands2 = {"entry REF=T"};//, "entry MQ between 78.8 79.0"};
testCreator = new FilterCreator(filterName, operands2);
testCreator.uploadEntries();
command = new FilterWriteApplier("testFilter", "testFilterApplier2", "FilterTest3.txt" );
System.out.println( command.execute() );
fileCompare("FilterTest3.txt", "FilterPass3.txt");
filterName = "testFilterApplier3";
String[] operands3 = {"entry REF=T", "entry MQ between 78.8 79.0"};
- testCreator = new FilterCreator(filterName, operands2);
+ testCreator = new FilterCreator(filterName, operands3);
testCreator.uploadEntries();
command = new FilterWriteApplier("testFilter", "testFilterApplier3", "FilterTest4.txt" );
System.out.println( command.execute() );
//fileCompare("FilterTest4.txt", "FilterPass4.txt");
}
private static void fileCompare(String fileTestName, String filePassName) throws FileNotFoundException, IOException {
//compare files
BufferedReader testBuffer = new BufferedReader(new FileReader(fileTestName));
BufferedReader goalBuffer = new BufferedReader(new FileReader(filePassName));
String goal = goalBuffer.readLine();
String toTest = testBuffer.readLine();
while ( goal != null && toTest != null )
{
if ( !goal.equals(toTest) )
{
break;
}
goal = goalBuffer.readLine();
toTest = testBuffer.readLine();
}
if ( goal == null && toTest == null )
{
System.out.println("PASS: Filter test");
}
else
{
System.out.println(String.format( "FAIL: Filter test\n\tgoal: %s\n\tfile: %s", goal, toTest) );
}
testBuffer.close();
goalBuffer.close();
}
}
| true | true | public static void main(String[] args) throws Exception {
FilterApplier command = new FilterWriteApplier("2013-04-10_13:54", "", "FilterTest.txt" );
System.out.println( command.execute() );
fileCompare("FilterTest.txt", "FilterPass.txt");
Connection conn = null;
Statement stmt = null;
try {
Class.forName(JDBC_DRIVER);
conn = DriverManager.getConnection(DB_URL, USER, PASS);
stmt = conn.createStatement();
}
catch (Exception e) {
throw new SQLException(e.getMessage());
}
String sql = "DELETE FROM `vcf_analyzer`.`Filter` WHERE `FilName`='testFilterApplier'";
String sql2 = "DELETE FROM `vcf_analyzer`.`Filter` WHERE `FilName`='testFilterApplier2'";
String sql3 = "DELETE FROM `vcf_analyzer`.`Filter` WHERE `FilName`='testFilterApplier3'";
try {
stmt.executeUpdate(sql);
stmt.executeUpdate(sql2);
stmt.executeUpdate(sql3);
} catch (SQLException se) {
throw new SQLException(se.getMessage());
}
String filterName = "testFilterApplier";
String[] operands = {"entry AC=7"};
FilterCreator testCreator = new FilterCreator(filterName, operands);
testCreator.uploadEntries();
//second Test
command = new FilterWriteApplier("2013-04-10_13:54", "testFilterApplier", "FilterTest2.txt" );
System.out.println( command.execute() );
fileCompare("FilterTest2.txt", "FilterPass2.txt");
filterName = "testFilterApplier2";
String[] operands2 = {"entry REF=T"};//, "entry MQ between 78.8 79.0"};
testCreator = new FilterCreator(filterName, operands2);
testCreator.uploadEntries();
command = new FilterWriteApplier("testFilter", "testFilterApplier2", "FilterTest3.txt" );
System.out.println( command.execute() );
fileCompare("FilterTest3.txt", "FilterPass3.txt");
filterName = "testFilterApplier3";
String[] operands3 = {"entry REF=T", "entry MQ between 78.8 79.0"};
testCreator = new FilterCreator(filterName, operands2);
testCreator.uploadEntries();
command = new FilterWriteApplier("testFilter", "testFilterApplier3", "FilterTest4.txt" );
System.out.println( command.execute() );
//fileCompare("FilterTest4.txt", "FilterPass4.txt");
}
| public static void main(String[] args) throws Exception {
FilterApplier command = new FilterWriteApplier("2013-04-10_13:54", "", "FilterTest.txt" );
System.out.println( command.execute() );
fileCompare("FilterTest.txt", "FilterPass.txt");
Connection conn = null;
Statement stmt = null;
try {
Class.forName(JDBC_DRIVER);
conn = DriverManager.getConnection(DB_URL, USER, PASS);
stmt = conn.createStatement();
}
catch (Exception e) {
throw new SQLException(e.getMessage());
}
String sql = "DELETE FROM `vcf_analyzer`.`Filter` WHERE `FilName`='testFilterApplier'";
String sql2 = "DELETE FROM `vcf_analyzer`.`Filter` WHERE `FilName`='testFilterApplier2'";
String sql3 = "DELETE FROM `vcf_analyzer`.`Filter` WHERE `FilName`='testFilterApplier3'";
try {
stmt.executeUpdate(sql);
stmt.executeUpdate(sql2);
stmt.executeUpdate(sql3);
} catch (SQLException se) {
throw new SQLException(se.getMessage());
}
String filterName = "testFilterApplier";
String[] operands = {"entry AC=7"};
FilterCreator testCreator = new FilterCreator(filterName, operands);
testCreator.uploadEntries();
//second Test
command = new FilterWriteApplier("2013-04-10_13:54", "testFilterApplier", "FilterTest2.txt" );
System.out.println( command.execute() );
fileCompare("FilterTest2.txt", "FilterPass2.txt");
filterName = "testFilterApplier2";
String[] operands2 = {"entry REF=T"};//, "entry MQ between 78.8 79.0"};
testCreator = new FilterCreator(filterName, operands2);
testCreator.uploadEntries();
command = new FilterWriteApplier("testFilter", "testFilterApplier2", "FilterTest3.txt" );
System.out.println( command.execute() );
fileCompare("FilterTest3.txt", "FilterPass3.txt");
filterName = "testFilterApplier3";
String[] operands3 = {"entry REF=T", "entry MQ between 78.8 79.0"};
testCreator = new FilterCreator(filterName, operands3);
testCreator.uploadEntries();
command = new FilterWriteApplier("testFilter", "testFilterApplier3", "FilterTest4.txt" );
System.out.println( command.execute() );
//fileCompare("FilterTest4.txt", "FilterPass4.txt");
}
|
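
The one-line fix in the row above (operands2 to operands3) is a copy-paste slip: the third test block was cloned from the second and a stale local survived. A minimal sketch of how folding the repeated create-and-upload step into a varargs helper removes that failure mode; it assumes the FilterCreator constructor used above and is not part of the original harness:

public class FilterTestHarness {
    // Each case names its operands exactly once at the call site, so a
    // cloned block can no longer silently reuse the previous case's array.
    private static void createAndUpload(String filterName, String... operands) throws Exception {
        FilterCreator creator = new FilterCreator(filterName, operands);
        creator.uploadEntries();
    }

    public static void main(String[] args) throws Exception {
        createAndUpload("testFilterApplier", "entry AC=7");
        createAndUpload("testFilterApplier2", "entry REF=T");
        createAndUpload("testFilterApplier3", "entry REF=T", "entry MQ between 78.8 79.0");
    }
}
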
diff --git a/providers/aws-ec2/src/test/java/org/jclouds/aws/ec2/services/AWSSecurityGroupClientLiveTest.java b/providers/aws-ec2/src/test/java/org/jclouds/aws/ec2/services/AWSSecurityGroupClientLiveTest.java
index d043a20df..57dc880e9 100644
--- a/providers/aws-ec2/src/test/java/org/jclouds/aws/ec2/services/AWSSecurityGroupClientLiveTest.java
+++ b/providers/aws-ec2/src/test/java/org/jclouds/aws/ec2/services/AWSSecurityGroupClientLiveTest.java
@@ -1,87 +1,87 @@
/**
*
* Copyright (C) 2011 Cloud Conscious, LLC. <[email protected]>
*
* ====================================================================
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* ====================================================================
*/
package org.jclouds.aws.ec2.services;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertNotNull;
import java.util.Set;
import org.jclouds.ec2.domain.IpPermission;
import org.jclouds.ec2.domain.IpProtocol;
import org.jclouds.ec2.domain.SecurityGroup;
import org.jclouds.ec2.services.SecurityGroupClientLiveTest;
import org.jclouds.ec2.util.IpPermissions;
import org.testng.annotations.Test;
import com.google.common.base.Predicate;
import com.google.common.collect.ImmutableMultimap;
/**
*
* @author Adrian Cole
*/
@Test(groups = "live", singleThreaded = true)
public class AWSSecurityGroupClientLiveTest extends SecurityGroupClientLiveTest {
public AWSSecurityGroupClientLiveTest() {
provider = "aws-ec2";
}
@Test
void testAuthorizeSecurityGroupIngressIpPermission() throws InterruptedException {
final String group1Name = PREFIX + "ingress11";
String group2Name = PREFIX + "ingress12";
cleanupAndSleep(group2Name);
cleanupAndSleep(group1Name);
try {
String group1Id = AWSSecurityGroupClient.class.cast(client).createSecurityGroupInRegionAndReturnId(null,
group1Name, group1Name);
String group2Id = AWSSecurityGroupClient.class.cast(client).createSecurityGroupInRegionAndReturnId(null,
group2Name, group2Name);
Thread.sleep(100);// eventually consistent
ensureGroupsExist(group1Name, group2Name);
AWSSecurityGroupClient.class.cast(client).authorizeSecurityGroupIngressInRegion(null, group1Id,
IpPermissions.permit(IpProtocol.TCP).port(80));
assertEventually(new GroupHasPermission(client, group1Name, new TCPPort80AllIPs()));
Set<SecurityGroup> oneResult = client.describeSecurityGroupsInRegion(null, group1Name);
assertNotNull(oneResult);
assertEquals(oneResult.size(), 1);
final SecurityGroup group = oneResult.iterator().next();
assertEquals(group.getName(), group1Name);
- IpPermissions group2CanHttpGroup1 = IpPermissions.permit(IpProtocol.TCP).fromPort(80)
+ IpPermissions group2CanHttpGroup1 = IpPermissions.permit(IpProtocol.TCP).port(80)
.originatingFromSecurityGroupId(group1Id);
AWSSecurityGroupClient.class.cast(client).authorizeSecurityGroupIngressInRegion(null, group2Id,
group2CanHttpGroup1);
assertEventually(new GroupHasPermission(client, group2Name, new Predicate<IpPermission>() {
@Override
public boolean apply(IpPermission arg0) {
return arg0.getUserIdGroupPairs().equals(ImmutableMultimap.of(group.getOwnerId(), group1Name))
&& arg0.getFromPort() == 80 && arg0.getToPort() == 80 && arg0.getIpProtocol() == IpProtocol.TCP;
}
}));
AWSSecurityGroupClient.class.cast(client).revokeSecurityGroupIngressInRegion(null, group2Id,
group2CanHttpGroup1);
assertEventually(new GroupHasNoPermissions(client, group2Name));
} finally {
client.deleteSecurityGroupInRegion(null, group2Name);
client.deleteSecurityGroupInRegion(null, group1Name);
}
}
}
| true | true | void testAuthorizeSecurityGroupIngressIpPermission() throws InterruptedException {
final String group1Name = PREFIX + "ingress11";
String group2Name = PREFIX + "ingress12";
cleanupAndSleep(group2Name);
cleanupAndSleep(group1Name);
try {
String group1Id = AWSSecurityGroupClient.class.cast(client).createSecurityGroupInRegionAndReturnId(null,
group1Name, group1Name);
String group2Id = AWSSecurityGroupClient.class.cast(client).createSecurityGroupInRegionAndReturnId(null,
group2Name, group2Name);
Thread.sleep(100);// eventually consistent
ensureGroupsExist(group1Name, group2Name);
AWSSecurityGroupClient.class.cast(client).authorizeSecurityGroupIngressInRegion(null, group1Id,
IpPermissions.permit(IpProtocol.TCP).port(80));
assertEventually(new GroupHasPermission(client, group1Name, new TCPPort80AllIPs()));
Set<SecurityGroup> oneResult = client.describeSecurityGroupsInRegion(null, group1Name);
assertNotNull(oneResult);
assertEquals(oneResult.size(), 1);
final SecurityGroup group = oneResult.iterator().next();
assertEquals(group.getName(), group1Name);
IpPermissions group2CanHttpGroup1 = IpPermissions.permit(IpProtocol.TCP).fromPort(80)
.originatingFromSecurityGroupId(group1Id);
AWSSecurityGroupClient.class.cast(client).authorizeSecurityGroupIngressInRegion(null, group2Id,
group2CanHttpGroup1);
assertEventually(new GroupHasPermission(client, group2Name, new Predicate<IpPermission>() {
@Override
public boolean apply(IpPermission arg0) {
return arg0.getUserIdGroupPairs().equals(ImmutableMultimap.of(group.getOwnerId(), group1Name))
&& arg0.getFromPort() == 80 && arg0.getToPort() == 80 && arg0.getIpProtocol() == IpProtocol.TCP;
}
}));
AWSSecurityGroupClient.class.cast(client).revokeSecurityGroupIngressInRegion(null, group2Id,
group2CanHttpGroup1);
assertEventually(new GroupHasNoPermissions(client, group2Name));
} finally {
client.deleteSecurityGroupInRegion(null, group2Name);
client.deleteSecurityGroupInRegion(null, group1Name);
}
}
| void testAuthorizeSecurityGroupIngressIpPermission() throws InterruptedException {
final String group1Name = PREFIX + "ingress11";
String group2Name = PREFIX + "ingress12";
cleanupAndSleep(group2Name);
cleanupAndSleep(group1Name);
try {
String group1Id = AWSSecurityGroupClient.class.cast(client).createSecurityGroupInRegionAndReturnId(null,
group1Name, group1Name);
String group2Id = AWSSecurityGroupClient.class.cast(client).createSecurityGroupInRegionAndReturnId(null,
group2Name, group2Name);
Thread.sleep(100);// eventually consistent
ensureGroupsExist(group1Name, group2Name);
AWSSecurityGroupClient.class.cast(client).authorizeSecurityGroupIngressInRegion(null, group1Id,
IpPermissions.permit(IpProtocol.TCP).port(80));
assertEventually(new GroupHasPermission(client, group1Name, new TCPPort80AllIPs()));
Set<SecurityGroup> oneResult = client.describeSecurityGroupsInRegion(null, group1Name);
assertNotNull(oneResult);
assertEquals(oneResult.size(), 1);
final SecurityGroup group = oneResult.iterator().next();
assertEquals(group.getName(), group1Name);
IpPermissions group2CanHttpGroup1 = IpPermissions.permit(IpProtocol.TCP).port(80)
.originatingFromSecurityGroupId(group1Id);
AWSSecurityGroupClient.class.cast(client).authorizeSecurityGroupIngressInRegion(null, group2Id,
group2CanHttpGroup1);
assertEventually(new GroupHasPermission(client, group2Name, new Predicate<IpPermission>() {
@Override
public boolean apply(IpPermission arg0) {
return arg0.getUserIdGroupPairs().equals(ImmutableMultimap.of(group.getOwnerId(), group1Name))
&& arg0.getFromPort() == 80 && arg0.getToPort() == 80 && arg0.getIpProtocol() == IpProtocol.TCP;
}
}));
AWSSecurityGroupClient.class.cast(client).revokeSecurityGroupIngressInRegion(null, group2Id,
group2CanHttpGroup1);
assertEventually(new GroupHasNoPermissions(client, group2Name));
} finally {
client.deleteSecurityGroupInRegion(null, group2Name);
client.deleteSecurityGroupInRegion(null, group1Name);
}
}
|
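
The fix in the row above replaces fromPort(80) with port(80): the predicate asserted afterwards requires getFromPort() and getToPort() to both equal 80, and port(n) is the convenience that pins both ends of the range. A tiny hypothetical builder illustrating that convention (not the actual jclouds IpPermissions implementation):

// Hypothetical range builder showing why fromPort(80) alone is not port(80).
final class PortRangeBuilder {
    private int fromPort = -1;
    private int toPort = -1;

    PortRangeBuilder fromPort(int p) { this.fromPort = p; return this; } // lower bound only
    PortRangeBuilder toPort(int p)   { this.toPort = p;   return this; } // upper bound only
    PortRangeBuilder port(int p)     { return fromPort(p).toPort(p); }   // pins both ends

    @Override
    public String toString() { return fromPort + ".." + toPort; }

    public static void main(String[] args) {
        System.out.println(new PortRangeBuilder().fromPort(80)); // 80..-1, toPort never set
        System.out.println(new PortRangeBuilder().port(80));     // 80..80, what the test asserts
    }
}
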
diff --git a/project-tests/src/com.phdroid/smsb/activity/BlockedSmsListActivityTest.java b/project-tests/src/com.phdroid/smsb/activity/BlockedSmsListActivityTest.java
index 8b8d48a..d5e7b57 100644
--- a/project-tests/src/com.phdroid/smsb/activity/BlockedSmsListActivityTest.java
+++ b/project-tests/src/com.phdroid/smsb/activity/BlockedSmsListActivityTest.java
@@ -1,162 +1,162 @@
package com.phdroid.smsb.activity;
import android.test.ActivityInstrumentationTestCase2;
import android.view.KeyEvent;
import android.view.View;
import android.widget.Button;
import android.widget.ListView;
import com.phdroid.test.blackjack.Solo;
import com.phdroid.smsb.R;
import com.phdroid.smsb.storage.IMessageProvider;
import com.phdroid.smsb.storage.MessageProviderHelper;
import com.phdroid.smsb.storage.TestMessageProvider2;
public class BlockedSmsListActivityTest extends ActivityInstrumentationTestCase2<BlockedSmsListActivity> {
private BlockedSmsListActivity mActivity;
private ListView mList;
private SmsPojoArrayAdapter mAdapter;
private static final int ADAPTER_COUNT = 10;
private Solo mSolo;
public BlockedSmsListActivityTest(){
super("com.phdroid.smsb.activity.BlockedSmsListActivity", BlockedSmsListActivity.class);
}
public BlockedSmsListActivityTest(String pkg, Class activityClass) {
super(pkg, activityClass);
}
public BlockedSmsListActivityTest(Class activityClass) {
super(activityClass);
}
public void setUp() throws Exception {
MessageProviderHelper.setMessageProvider(new TestMessageProvider2());
super.setUp();
setActivityInitialTouchMode(false);
mActivity = getActivity();
mList = (ListView)mActivity.findViewById(R.id.messagesListView);
mAdapter = (SmsPojoArrayAdapter) mList.getAdapter();
mSolo = new Solo(getInstrumentation(), getActivity());
}
public void testPreConditions() {
assertTrue(mAdapter != null);
assertEquals(ADAPTER_COUNT, mAdapter.getCount());
}
public void test_undo_button_is_invisible_at_startup(){
View v = mActivity.findViewById(R.id.buttonLayout);
assertEquals(View.GONE, v.getVisibility());
}
public void test_pressing_DeleteAll_deletes_all_items(){
pressDeleteAllMenuItem();
assertEquals(0, mAdapter.getCount());
}
public void test_undo_button_is_visible_after_pressing_DeleteAll(){
pressDeleteAllMenuItem();
View v = mActivity.findViewById(R.id.buttonLayout);
assertEquals(View.VISIBLE, v.getVisibility());
}
public void test_undo_button_has_correct_text_after_pressing_DeleteAll(){
pressDeleteAllMenuItem();
Button b = (Button)mActivity.findViewById(R.id.undoButton);
assertEquals("10 messages were deleted. (Undo)", b.getText());
}
public void test_pressing_undo_button_after_deleting_all_returns_all_items_back(){
int originalCount = mAdapter.getCount();
pressDeleteAllMenuItem();
pressUndoButton();
assertEquals(originalCount, mAdapter.getCount());
}
public void test_pressing_select_many_button_opens_select_many_activity(){
pressSelectManyMenuItem();
assertEquals(SelectManyActivity.class, mSolo.getCurrentActivity().getClass());
}
public void test_pressing_settings_button_opens_settings_activity(){
pressSettingsMenuItem();
assertEquals(SettingsActivity.class, mSolo.getCurrentActivity().getClass());
}
public void test_pressing_message_opens_view_message_activity(){
pressMessage(3);
assertEquals(ViewMessageActivity.class, mSolo.getCurrentActivity().getClass());
}
public void test_actions_are_performed_when_navigating_to_select_many_activity(){
deleteFirstMessage();
pressSelectManyMenuItem();
sendKeys(KeyEvent.KEYCODE_BACK);
View v = mActivity.findViewById(R.id.buttonLayout);
assertEquals(View.GONE, v.getVisibility());
}
public void test_actions_are_performed_when_navigating_to_settings_activity(){
deleteFirstMessage();
pressSettingsMenuItem();
sendKeys(KeyEvent.KEYCODE_BACK);
View v = mActivity.findViewById(R.id.buttonLayout);
assertEquals(View.GONE, v.getVisibility());
}
public void test_actions_are_performed_when_navigating_to_view_message_activity(){
deleteFirstMessage();
pressMessage(2);
sendKeys(KeyEvent.KEYCODE_BACK);
View v = mActivity.findViewById(R.id.buttonLayout);
assertEquals(View.GONE, v.getVisibility());
}
private void deleteFirstMessage() {
- final IMessageProvider provider = MessageProviderHelper.getMessageProvider(this.getActivity().getContentResolver());
+ final IMessageProvider provider = MessageProviderHelper.getMessageProvider(this.getActivity(), this.getActivity(), this.getActivity().getContentResolver());
mActivity.runOnUiThread(new Runnable() {
public void run() {
provider.delete(0);
mAdapter.notifyDataSetChanged();
}
});
getInstrumentation().waitForIdleSync();
}
private void pressMessage(int messageIndex) {
for(int i = 0; i<messageIndex; i++){
sendKeys(KeyEvent.KEYCODE_DPAD_DOWN); // move the selection down to the target message
}
sendKeys(KeyEvent.KEYCODE_DPAD_CENTER); // pressing selected item
getInstrumentation().waitForIdleSync();
}
private void pressUndoButton() {
final Button b = (Button)mActivity.findViewById(R.id.undoButton);
mActivity.runOnUiThread(new Runnable() {
public void run() {
b.performClick();
}
});
getInstrumentation().waitForIdleSync();
}
private void pressDeleteAllMenuItem() {
getInstrumentation().sendKeyDownUpSync(KeyEvent.KEYCODE_MENU);
getInstrumentation().invokeMenuActionSync(mActivity, R.id.delete_all_item, 0);
}
private void pressSelectManyMenuItem() {
getInstrumentation().sendKeyDownUpSync(KeyEvent.KEYCODE_MENU);
getInstrumentation().invokeMenuActionSync(mActivity, R.id.select_many_item, 0);
}
private void pressSettingsMenuItem() {
getInstrumentation().sendKeyDownUpSync(KeyEvent.KEYCODE_MENU);
getInstrumentation().invokeMenuActionSync(mActivity, R.id.settings_item, 0);
}
}
| true | true | private void deleteFirstMessage() {
final IMessageProvider provider = MessageProviderHelper.getMessageProvider(this.getActivity().getContentResolver());
mActivity.runOnUiThread(new Runnable() {
public void run() {
provider.delete(0);
mAdapter.notifyDataSetChanged();
}
});
getInstrumentation().waitForIdleSync();
}
| private void deleteFirstMessage() {
final IMessageProvider provider = MessageProviderHelper.getMessageProvider(this.getActivity(), this.getActivity(), this.getActivity().getContentResolver());
mActivity.runOnUiThread(new Runnable() {
public void run() {
provider.delete(0);
mAdapter.notifyDataSetChanged();
}
});
getInstrumentation().waitForIdleSync();
}
|
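
The fix in the SMS row tracks a widened MessageProviderHelper.getMessageProvider signature, passing the activity twice plus its ContentResolver. When a helper grows this way, a delegating overload keeps old call sites compiling. A hypothetical sketch, since MessageProviderHelper itself is not shown and the exact parameter types are assumed:

import android.app.Activity;
import android.content.ContentResolver;
import android.content.Context;

// Assumed shape of the helper after the signature change.
final class MessageProviderHelperSketch {
    static IMessageProvider getMessageProvider(Context ctx, Activity owner, ContentResolver resolver) {
        return null; // real provider lookup elided
    }

    // Delegating overload: the legacy single-argument form forwards the
    // same activity for both roles, exactly as the fixed test now does.
    static IMessageProvider getMessageProvider(Activity activity) {
        return getMessageProvider(activity, activity, activity.getContentResolver());
    }
}
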
diff --git a/org.dawb.common.python/src/org/dawb/common/python/rpc/PythonService.java b/org.dawb.common.python/src/org/dawb/common/python/rpc/PythonService.java
index 4a99a0c8..c7cf6b10 100644
--- a/org.dawb.common.python/src/org/dawb/common/python/rpc/PythonService.java
+++ b/org.dawb.common.python/src/org/dawb/common/python/rpc/PythonService.java
@@ -1,281 +1,287 @@
/*-
* Copyright © 2011 Diamond Light Source Ltd.
*
* This file is part of GDA.
*
* GDA is free software: you can redistribute it and/or modify it under the
* terms of the GNU General Public License version 3 as published by the Free
* Software Foundation.
*
* GDA is distributed in the hope that it will be useful, but WITHOUT ANY
* WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
* FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
* details.
*
* You should have received a copy of the GNU General Public License along
* with GDA. If not, see <http://www.gnu.org/licenses/>.
*/
package org.dawb.common.python.rpc;
import java.io.File;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.dawb.common.util.eclipse.BundleUtils;
import org.dawb.common.util.net.NetUtils;
import uk.ac.diamond.scisoft.analysis.rpc.AnalysisRpcClient;
import uk.ac.gda.util.OSUtils;
import com.isencia.util.commandline.ManagedCommandline;
import org.eclipse.core.variables.VariablesPlugin;
/**
* This class encapsulates a system command to python used with the RPC service
* and drives python using the diamond RPC link.
*
* It is a subset of the RPC interface made available by diamond, pertinent to running
* python commands in the way jep does.
*/
public class PythonService {
private ManagedCommandline command;
private AnalysisRpcClient client;
private Thread stopThread;
/**
* Must use openConnection()
*/
private PythonService() {
}
/**
* Each call to this method starts a python process with
* a server waiting for commands. An RCP client is
* attached to it.
*
* @param command to start a python with numpy in. For instance
* 'python', 'python2.6', or the full path
*
* The search for a free port starts at 8613.
* The property org.dawb.passerelle.actors.scripts.python.free.port
* may be used to change the start port if needed.
*
* This method also adds a shutdown hook to ensure that the service
* is stopped cleanly when the VM is shut down. Calling the stop()
* method removes this shutdown hook.
*
* @return
*/
public static synchronized PythonService openConnection(final String pythonInterpreter) throws Exception {
final PythonService service = new PythonService();
- final String scisoftRpcPort = VariablesPlugin.getDefault().getStringVariableManager().performStringSubstitution("${scisoft_rpc_port}");
+ String scisoftRpcPort;
+ try {
+ // TODO Ensure plotting is started programmatically in the GUI.
+ scisoftRpcPort = VariablesPlugin.getDefault().getStringVariableManager().performStringSubstitution("${scisoft_rpc_port}");
+ } catch (Exception ne) {
+ scisoftRpcPort = String.valueOf(0);
+ }
final int port = NetUtils.getFreePort(getServiceStartPort());
final File path = BundleUtils.getBundleLocation("org.dawb.common.python");
final String script;
if (System.getProperty("eclipse.debug.session")!=null || System.getProperty("org.dawb.test.session")!=null) {
script = path.getAbsolutePath()+"/src/org/dawb/common/python/rpc/python_service.py";
} else {
script = path.getAbsolutePath()+"/org/dawb/common/python/rpc/python_service.py";
}
service.command = new ManagedCommandline();
service.command.addArguments(new String[]{pythonInterpreter, "-u", script, String.valueOf(port), scisoftRpcPort});
// Ensure uk.ac.diamond.scisoft.python in PYTHONPATH
final Map<String,String> env = new HashMap<String,String>(System.getenv());
String pythonPath = env.get("PYTHONPATH");
StringBuilder pyBuf = pythonPath==null ? new StringBuilder() : new StringBuilder(pythonPath);
if (OSUtils.isWindowsOS()) pyBuf.append(";"); else pyBuf.append(":");
if (System.getProperty("eclipse.debug.session")!=null || System.getProperty("org.dawb.test.session")!=null) {
pyBuf.append(BundleUtils.getBundleLocation("uk.ac.diamond.scisoft.python").getAbsolutePath()+"/src");
} else {
pyBuf.append(BundleUtils.getBundleLocation("uk.ac.diamond.scisoft.python").getAbsolutePath());
}
env.put("PYTHONPATH", pyBuf.toString());
service.command.setEnv(env);
// Currently log back python output directly to the log file.
service.command.setStreamLogsToLogging(true);
service.command.setWorkingDir(path);
service.command.execute();
service.stopThread = new Thread("Stop Python Service") {
public void run() {
service.stop();
}
};
Runtime.getRuntime().addShutdownHook(service.stopThread);
service.client = service.getActiveClient(port);
return service;
}
/**
* This call opens a client to the service already running. If you
* want you can run the python_serice.py in pydev then debug the commands
* as they come it. Use this method to get the PythonService Java side in
* that case. It will look for the running RPC service on the port passed
* in and allow commands to be run and debugged.
*
* @param port
* @return
* @throws Exception
*/
public static PythonService openClient(final int port) throws Exception {
final PythonService service = new PythonService();
service.client = service.getActiveClient(port);
return service;
}
/**
* Tries to connect to the service, only returning when connected.
* This is more reliable than waiting for a given time.
* @param port
* @return
* @throws InterruptedException
*/
private AnalysisRpcClient getActiveClient(int port) throws Exception {
if (!isRunning()) throw new Exception("The remote python process did not start!");
int count=0;
final int time = System.getProperty("org.dawb.common.python.rpc.service.timeout") !=null ?
Integer.parseInt(System.getProperty("org.dawb.common.python.rpc.service.timeout")) :
5000;
while(count<=time) {
try {
final AnalysisRpcClient client = new AnalysisRpcClient(port);
final AnalysisRpcClient client = new AnalysisRpcClient(port);
if ((((Boolean)active)).booleanValue()) return client;
Thread.sleep(100);
count+=100;
continue;
} catch (Exception ne) {
count+=100;
Thread.sleep(100);
continue;
}
}
throw new Exception("RPC connect to python timed out after "+time+"ms! Are you sure the python server is going?");
}
/**
* Will be null when openClient(port) is used.
* @return
*/
public ManagedCommandline getCommand() {
return command;
}
public AnalysisRpcClient getClient() {
return client;
}
public void stop() {
if (command==null) return;
if (command.getProcess()==null) return;
command.getProcess().destroy();
if (stopThread!=null) {
try {
Runtime.getRuntime().removeShutdownHook(stopThread);
} catch (Throwable ne) {
// We try to remove the hook, but failures can be ignored because this method
// may be called during shutdown, when removal is no longer possible.
}
stopThread = null;
}
}
public boolean isRunning() {
if (command==null) return true; // Probably in debug mode
return !command.hasTerminated();
}
/**
* Convenience method for calling
* @param methodName
* @param arguments
* @param outputNames - names of global variables to read back from python
* @return
* @throws Exception
*/
public Map<String,? extends Object> runScript(String scriptFullPath, Map<String, ?> data, Collection<String> outputNames) throws Exception {
final List<String> additionalPaths = new ArrayList<String>(1);
additionalPaths.add(BundleUtils.getEclipseHome());
if (System.getenv("PYTHONPATH")!=null) {
additionalPaths.addAll(Arrays.asList(System.getenv("PYTHONPATH").split(":")));
}
final Object out = client.request("runScript", new Object[]{scriptFullPath, data, outputNames, additionalPaths});
// Calls the method 'runScript' in the script with the arguments
return (Map<String,? extends Object>)out;
}
/**
* Run an edna plugin with some xml and get some xml back.
*
* @param execDir
* @param pluginName
* @param ednaDebugMode
* @param xmlInputString
* @return
*/
public String runEdnaPlugin(final String execDir,
final String pluginName,
final boolean ednaDebugMode,
final String xmlInputString) throws Exception {
// We add fabio as an additional path to the service.
final List<String> additionalPaths = new ArrayList<String>(1);
additionalPaths.add(BundleUtils.getEclipseHome());
if (System.getenv("PYTHONPATH")!=null) {
additionalPaths.addAll(Arrays.asList(System.getenv("PYTHONPATH").split(":")));
}
final Object out = client.request("runEdnaPlugin", new Object[]{execDir, pluginName, ednaDebugMode, xmlInputString, additionalPaths});
// Calls the method 'runEdnaPlugin' in the script with the arguments
return (String)out;
}
public static int getDebugPort() {
int port = 8613;
if (System.getProperty("org.dawb.passerelle.actors.scripts.python.debug.port")!=null) {
// In an emergency allow the port to be changed for the debug session.
port = Integer.parseInt(System.getProperty("org.dawb.passerelle.actors.scripts.python.debug.port"));
}
return port;
}
/**
* Returns the port used to start the search for a free port in non-debug mode
* @return
*/
private static int getServiceStartPort() {
int port = 8613;
if (System.getProperty("org.dawb.passerelle.actors.scripts.python.free.port")!=null) {
// In an emergency allow the port to be changed for the debug session.
port = Integer.parseInt(System.getProperty("org.dawb.passerelle.actors.scripts.python.free.port"));
}
return port;
}
}
| true | true | public static synchronized PythonService openConnection(final String pythonInterpreter) throws Exception {
final PythonService service = new PythonService();
final String scisoftRpcPort = VariablesPlugin.getDefault().getStringVariableManager().performStringSubstitution("${scisoft_rpc_port}");
final int port = NetUtils.getFreePort(getServiceStartPort());
final File path = BundleUtils.getBundleLocation("org.dawb.common.python");
final String script;
if (System.getProperty("eclipse.debug.session")!=null || System.getProperty("org.dawb.test.session")!=null) {
script = path.getAbsolutePath()+"/src/org/dawb/common/python/rpc/python_service.py";
} else {
script = path.getAbsolutePath()+"/org/dawb/common/python/rpc/python_service.py";
}
service.command = new ManagedCommandline();
service.command.addArguments(new String[]{pythonInterpreter, "-u", script, String.valueOf(port), scisoftRpcPort});
// Ensure uk.ac.diamond.scisoft.python in PYTHONPATH
final Map<String,String> env = new HashMap<String,String>(System.getenv());
String pythonPath = env.get("PYTHONPATH");
StringBuilder pyBuf = pythonPath==null ? new StringBuilder() : new StringBuilder(pythonPath);
if (OSUtils.isWindowsOS()) pyBuf.append(";"); else pyBuf.append(":");
if (System.getProperty("eclipse.debug.session")!=null || System.getProperty("org.dawb.test.session")!=null) {
pyBuf.append(BundleUtils.getBundleLocation("uk.ac.diamond.scisoft.python").getAbsolutePath()+"/src");
} else {
pyBuf.append(BundleUtils.getBundleLocation("uk.ac.diamond.scisoft.python").getAbsolutePath());
}
env.put("PYTHONPATH", pyBuf.toString());
service.command.setEnv(env);
// Currently log back python output directly to the log file.
service.command.setStreamLogsToLogging(true);
service.command.setWorkingDir(path);
service.command.execute();
service.stopThread = new Thread("Stop Python Service") {
public void run() {
service.stop();
}
};
Runtime.getRuntime().addShutdownHook(service.stopThread);
service.client = service.getActiveClient(port);
return service;
}
| public static synchronized PythonService openConnection(final String pythonInterpreter) throws Exception {
final PythonService service = new PythonService();
String scisoftRpcPort;
try {
// TODO Ensure plotting is started programmatically in the GUI.
scisoftRpcPort = VariablesPlugin.getDefault().getStringVariableManager().performStringSubstitution("${scisoft_rpc_port}");
} catch (Exception ne) {
scisoftRpcPort = String.valueOf(0);
}
final int port = NetUtils.getFreePort(getServiceStartPort());
final File path = BundleUtils.getBundleLocation("org.dawb.common.python");
final String script;
if (System.getProperty("eclipse.debug.session")!=null || System.getProperty("org.dawb.test.session")!=null) {
script = path.getAbsolutePath()+"/src/org/dawb/common/python/rpc/python_service.py";
} else {
script = path.getAbsolutePath()+"/org/dawb/common/python/rpc/python_service.py";
}
service.command = new ManagedCommandline();
service.command.addArguments(new String[]{pythonInterpreter, "-u", script, String.valueOf(port), scisoftRpcPort});
// Ensure uk.ac.diamond.scisoft.python in PYTHONPATH
final Map<String,String> env = new HashMap<String,String>(System.getenv());
String pythonPath = env.get("PYTHONPATH");
StringBuilder pyBuf = pythonPath==null ? new StringBuilder() : new StringBuilder(pythonPath);
if (OSUtils.isWindowsOS()) pyBuf.append(";"); else pyBuf.append(":");
if (System.getProperty("eclipse.debug.session")!=null || System.getProperty("org.dawb.test.session")!=null) {
pyBuf.append(BundleUtils.getBundleLocation("uk.ac.diamond.scisoft.python").getAbsolutePath()+"/src");
} else {
pyBuf.append(BundleUtils.getBundleLocation("uk.ac.diamond.scisoft.python").getAbsolutePath());
}
env.put("PYTHONPATH", pyBuf.toString());
service.command.setEnv(env);
// Currently log back python output directly to the log file.
service.command.setStreamLogsToLogging(true);
service.command.setWorkingDir(path);
service.command.execute();
service.stopThread = new Thread("Stop Python Service") {
public void run() {
service.stop();
}
};
Runtime.getRuntime().addShutdownHook(service.stopThread);
service.client = service.getActiveClient(port);
return service;
}
|
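
The PythonService fix wraps the ${scisoft_rpc_port} substitution in a try/catch and falls back to "0" when the variable manager cannot resolve it (for example in headless runs). The same resilient-lookup pattern as a small generic helper; this is a sketch, not part of the dawb codebase:

import java.util.function.Supplier;

final class ConfigLookup {
    // Evaluate a lookup that may throw (missing plugin, unset variable, ...)
    // and fall back to a safe default instead of aborting startup.
    static String resolveOrDefault(Supplier<String> lookup, String fallback) {
        try {
            String value = lookup.get();
            return value != null ? value : fallback;
        } catch (RuntimeException e) {
            return fallback;
        }
    }

    public static void main(String[] args) {
        String port = resolveOrDefault(
                () -> { throw new IllegalStateException("no variable manager"); },
                "0");
        System.out.println(port); // prints 0
    }
}
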
diff --git a/3rdPartyServices/StudentServices/RFID/RFIDClient3pApp/src/main/java/ac/hw/rfid/client/RfidClient.java b/3rdPartyServices/StudentServices/RFID/RFIDClient3pApp/src/main/java/ac/hw/rfid/client/RfidClient.java
index 95548314..1194032e 100644
--- a/3rdPartyServices/StudentServices/RFID/RFIDClient3pApp/src/main/java/ac/hw/rfid/client/RfidClient.java
+++ b/3rdPartyServices/StudentServices/RFID/RFIDClient3pApp/src/main/java/ac/hw/rfid/client/RfidClient.java
@@ -1,334 +1,334 @@
/**
* Copyright (c) 2011, SOCIETIES Consortium (WATERFORD INSTITUTE OF TECHNOLOGY (TSSG), HERIOT-WATT UNIVERSITY (HWU), SOLUTA.NET
* (SN), GERMAN AEROSPACE CENTRE (Deutsches Zentrum fuer Luft- und Raumfahrt e.V.) (DLR), Zavod za varnostne tehnologije
* informacijske družbe in elektronsko poslovanje (SETCCE), INSTITUTE OF COMMUNICATION AND COMPUTER SYSTEMS (ICCS), LAKE
* COMMUNICATIONS (LAKE), INTEL PERFORMANCE LEARNING SOLUTIONS LTD (INTEL), PORTUGAL TELECOM INOVAÇÃO, SA (PTIN), IBM Corp.,
* INSTITUT TELECOM (ITSUD), AMITEC DIACHYTI EFYIA PLIROFORIKI KAI EPIKINONIES ETERIA PERIORISMENIS EFTHINIS (AMITEC), TELECOM
* ITALIA S.p.a.(TI), TRIALOG (TRIALOG), Stiftelsen SINTEF (SINTEF), NEC EUROPE LTD (NEC))
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following
* conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
* disclaimer in the documentation and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING,
* BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT
* SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
* NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package ac.hw.rfid.client;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Future;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.societies.api.comm.xmpp.interfaces.ICommManager;
import org.societies.api.context.broker.ICtxBroker;
import org.societies.api.context.model.CtxAttributeTypes;
import org.societies.api.context.source.ICtxSourceMgr;
import org.societies.api.identity.IIdentity;
import org.societies.api.identity.IIdentityManager;
import org.societies.api.identity.RequestorService;
import org.societies.api.osgi.event.CSSEvent;
import org.societies.api.osgi.event.CSSEventConstants;
import org.societies.api.osgi.event.EventListener;
import org.societies.api.osgi.event.EventTypes;
import org.societies.api.osgi.event.IEventMgr;
import org.societies.api.osgi.event.InternalEvent;
import org.societies.api.schema.servicelifecycle.model.ServiceResourceIdentifier;
import org.societies.api.services.IServices;
import org.societies.api.services.ServiceMgmtEvent;
import org.societies.api.services.ServiceMgmtEventType;
import ac.hw.rfid.client.api.IRfidClient;
import ac.hw.rfid.server.api.remote.IRfidServer;
/**
* @author Eliza Papadopoulou
* @created December, 2010
*/
public class RfidClient extends EventListener implements IRfidClient {
private ICommManager commManager;
private Logger logging = LoggerFactory.getLogger(this.getClass());
private ICtxSourceMgr ctxSourceMgr;
private IIdentityManager idm;
/*
* serviceID of RfidClient
*/
private ServiceResourceIdentifier clientID;
/*
* my LOCAL DPI
*/
private IIdentity userIdentity;
private ClientGUIFrame clientGUI;
private IIdentity serverIdentity;
private IRfidServer rfidServerRemote;
private String myCtxSourceId;
private ICtxBroker ctxBroker;
private IEventMgr evMgr;
private IServices services;
private RequestorService requestor;
public RfidClient() {
}
public void close() {
this.clientGUI.setVisible(false);
this.clientGUI.dispose();
}
public void initialiseRFIDClient() {
this.registerForSLMEvents();
}
private void registerForSLMEvents() {
String eventFilter = "(&" +
"(" + CSSEventConstants.EVENT_NAME + "="+ServiceMgmtEventType.NEW_SERVICE+")" +
"(" + CSSEventConstants.EVENT_SOURCE + "=org/societies/servicelifecycle)" +
")";
this.getEvMgr().subscribeInternalEvent(this, new String[]{EventTypes.SERVICE_LIFECYCLE_EVENT}, eventFilter);
this.logging.debug("Subscribed to "+EventTypes.SERVICE_LIFECYCLE_EVENT+" events");
}
private void unRegisterFromSLMEvents()
{
String eventFilter = "(&" +
"(" + CSSEventConstants.EVENT_NAME + "="+ServiceMgmtEventType.NEW_SERVICE+")" +
"(" + CSSEventConstants.EVENT_SOURCE + "=org/societies/servicelifecycle)" +
")";
this.evMgr.unSubscribeInternalEvent(this, new String[]{EventTypes.SERVICE_LIFECYCLE_EVENT}, eventFilter);
//this.evMgr.subscribeInternalEvent(this, new String[]{EventTypes.SERVICE_LIFECYCLE_EVENT}, eventFilter);
this.logging.debug("Unsubscribed from "+EventTypes.SERVICE_LIFECYCLE_EVENT+" events");
}
private boolean register(){
try {
Future<String> fID = this.ctxSourceMgr.register("RFID", CtxAttributeTypes.LOCATION_SYMBOLIC);
myCtxSourceId = fID.get();
return true;
} catch (InterruptedException e) {
// TODO Auto-generated catch block
e.printStackTrace();
return false;
} catch (ExecutionException e) {
// TODO Auto-generated catch block
e.printStackTrace();
return false;
}
}
@Override
public void sendUpdate(String symLoc, String tagNumber) {
this.clientGUI.sendSymLocUpdate(tagNumber, symLoc);
this.clientGUI.tfTagNumber.setText(tagNumber);
if (this.myCtxSourceId==null){
boolean registered = this.register();
if (registered){
this.ctxSourceMgr.sendUpdate(this.myCtxSourceId, symLoc);
this.logging.debug("Sent new RFID information");
}else{
this.logging.debug("Received symloc update: "+symLoc+" but unable to register as a context source with the ICtxSourceMgr.");
}
}else{
//this.ctxSourceMgr.sendUpdate(this.myCtxSourceId, symLoc);
this.ctxSourceMgr.sendUpdate(myCtxSourceId, symLoc, null, false, 1.0, 1d/60);
this.logging.debug("Sent new RFID information");
}
}
@Override
public void acknowledgeRegistration(Integer rStatus) {
this.clientGUI.acknowledgeRegistration(rStatus);
}
/**
* @return the commManager
*/
public ICommManager getCommManager() {
return commManager;
}
/**
* @param commManager the commManager to set
*/
public void setCommManager(ICommManager commManager) {
this.commManager = commManager;
}
/**
* @return the ctxSourceMgr
*/
public ICtxSourceMgr getCtxSourceMgr() {
return ctxSourceMgr;
}
/**
* @param ctxSourceMgr the ctxSourceMgr to set
*/
public void setCtxSourceMgr(ICtxSourceMgr ctxSourceMgr) {
this.ctxSourceMgr = ctxSourceMgr;
}
/**
* @return the rfidServer
*/
public IRfidServer getRfidServerRemote() {
return rfidServerRemote;
}
/**
* @param rfidServer the rfidServer to set
*/
public void setRfidServerRemote(IRfidServer rfidServer) {
this.rfidServerRemote = rfidServer;
}
/**
* @return the ctxBroker
*/
public ICtxBroker getCtxBroker() {
return ctxBroker;
}
/**
* @param ctxBroker the ctxBroker to set
*/
public void setCtxBroker(ICtxBroker ctxBroker) {
this.ctxBroker = ctxBroker;
}
@Override
public void handleInternalEvent(InternalEvent event) {
ServiceMgmtEvent slmEvent = (ServiceMgmtEvent) event.geteventInfo();
- if (slmEvent.getBundleSymbolName().equalsIgnoreCase("ac.hw.display.DisplayPortalClientApp")){
+ if (slmEvent.getBundleSymbolName().equalsIgnoreCase("ac.hw.rfid.RFIDClientApp")){
this.logging.debug("Received SLM event for my bundle");
if (slmEvent.getEventType().equals(ServiceMgmtEventType.NEW_SERVICE)){
ServiceResourceIdentifier myClientServiceID = slmEvent.getServiceId();
this.serverIdentity = getServices().getServer(myClientServiceID);
this.logging.debug("Retrieved my server's identity: "+this.serverIdentity.getJid());
//this.requestServerIdentityFromUser();
//ServiceResourceIdentifier serviceId = this.portalServerRemote.getServerServiceId(serverIdentity);
//UIManager.put("ClassLoader", ClassLoader.getSystemClassLoader());
ServiceResourceIdentifier serviceId = this.getServices().getServerServiceIdentifier(myClientServiceID);
this.logging.debug("Retrieved my server's serviceID: "+serviceId.getIdentifier().toASCIIString());
this.requestor = new RequestorService(serverIdentity, serviceId);
boolean registered = this.register();
if (registered){
clientGUI = new ClientGUIFrame(this.rfidServerRemote, this.getCtxBroker(), this.userIdentity, this.serverIdentity, clientID);
}else{
this.logging.debug("unable to register as a context source with the ICtxSourceMgr");
}
this.unRegisterFromSLMEvents();
}
}else{
this.logging.debug("Received SLM event but it wasn't related to my bundle");
}
}
@Override
public void handleExternalEvent(CSSEvent event) {
// TODO Auto-generated method stub
}
public IEventMgr getEvMgr() {
return evMgr;
}
public void setEvMgr(IEventMgr evMgr) {
this.evMgr = evMgr;
}
public IServices getServices() {
return services;
}
public void setServices(IServices services) {
this.services = services;
}
}
| true | true | public void handleInternalEvent(InternalEvent event) {
ServiceMgmtEvent slmEvent = (ServiceMgmtEvent) event.geteventInfo();
if (slmEvent.getBundleSymbolName().equalsIgnoreCase("ac.hw.display.DisplayPortalClientApp")){
this.logging.debug("Received SLM event for my bundle");
if (slmEvent.getEventType().equals(ServiceMgmtEventType.NEW_SERVICE)){
ServiceResourceIdentifier myClientServiceID = slmEvent.getServiceId();
this.serverIdentity = getServices().getServer(myClientServiceID);
this.logging.debug("Retrieved my server's identity: "+this.serverIdentity.getJid());
//this.requestServerIdentityFromUser();
//ServiceResourceIdentifier serviceId = this.portalServerRemote.getServerServiceId(serverIdentity);
//UIManager.put("ClassLoader", ClassLoader.getSystemClassLoader());
ServiceResourceIdentifier serviceId = this.getServices().getServerServiceIdentifier(myClientServiceID);
this.logging.debug("Retrieved my server's serviceID: "+serviceId.getIdentifier().toASCIIString());
this.requestor = new RequestorService(serverIdentity, serviceId);
boolean registered = this.register();
if (registered){
clientGUI = new ClientGUIFrame(this.rfidServerRemote, this.getCtxBroker(), this.userIdentity, this.serverIdentity, clientID);
}else{
this.logging.debug("unable to register as a context source with the ICtxSourceMgr");
}
this.unRegisterFromSLMEvents();
}
}else{
this.logging.debug("Received SLM event but it wasn't related to my bundle");
}
}
| public void handleInternalEvent(InternalEvent event) {
ServiceMgmtEvent slmEvent = (ServiceMgmtEvent) event.geteventInfo();
if (slmEvent.getBundleSymbolName().equalsIgnoreCase("ac.hw.rfid.RFIDClientApp")){
this.logging.debug("Received SLM event for my bundle");
if (slmEvent.getEventType().equals(ServiceMgmtEventType.NEW_SERVICE)){
ServiceResourceIdentifier myClientServiceID = slmEvent.getServiceId();
this.serverIdentity = getServices().getServer(myClientServiceID);
this.logging.debug("Retrieved my server's identity: "+this.serverIdentity.getJid());
//this.requestServerIdentityFromUser();
//ServiceResourceIdentifier serviceId = this.portalServerRemote.getServerServiceId(serverIdentity);
//UIManager.put("ClassLoader", ClassLoader.getSystemClassLoader());
ServiceResourceIdentifier serviceId = this.getServices().getServerServiceIdentifier(myClientServiceID);
this.logging.debug("Retrieved my server's serviceID: "+serviceId.getIdentifier().toASCIIString());
this.requestor = new RequestorService(serverIdentity, serviceId);
boolean registered = this.register();
if (registered){
clientGUI = new ClientGUIFrame(this.rfidServerRemote, this.getCtxBroker(), this.userIdentity, this.serverIdentity, clientID);
}else{
this.logging.debug("unable to register as a context source with the ICtxSourceMgr");
}
this.unRegisterFromSLMEvents();
}
}else{
this.logging.debug("Received SLM event but it wasn't related to my bundle");
}
}
|
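
The RFID fix corrects a bundle symbolic name pasted in from a sibling project (ac.hw.display.DisplayPortalClientApp). Hoisting identity strings like this into a named constant keeps the SLM event filter and the handler comparison from drifting apart; a sketch under that assumption:

public class RfidClientConstants {
    // Single source of truth for this bundle's identity; both the event
    // filter and handleInternalEvent compare against the same constant.
    static final String BUNDLE_SYMBOLIC_NAME = "ac.hw.rfid.RFIDClientApp";

    static boolean isForMyBundle(String eventBundleName) {
        return BUNDLE_SYMBOLIC_NAME.equalsIgnoreCase(eventBundleName);
    }

    public static void main(String[] args) {
        System.out.println(isForMyBundle("ac.hw.rfid.RFIDClientApp"));             // true
        System.out.println(isForMyBundle("ac.hw.display.DisplayPortalClientApp")); // false: the pasted name
    }
}
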
diff --git a/src/lab3/Main.java b/src/lab3/Main.java
index 902f8e9..53ad9f1 100644
--- a/src/lab3/Main.java
+++ b/src/lab3/Main.java
@@ -1,49 +1,56 @@
package lab3;
import static fj.data.List.list;
/**
* Main class with demo {@link Plane} and {@link AirlineCompany} usage
*/
public class Main {
public static void main(String[] args) {
// 1. Create planes using builders
// name, capacity, load, flightDistance, fuelPerHour, units
- Plane heinkel = new BomberBuilder().setName("Heinkel").setCapacity(100).setLoad(1000)
- .setFlightDistance(10).setFuelPerHour(15).setBombs(14).createBomber();
+ Plane heinkel = new BomberBuilder().setName("Heinkel").setCapacity(100)
+ .setLoad(1000).setFlightDistance(10).setFuelPerHour(15)
+ .setBombs(14).createBomber();
- Plane douglas = new BomberBuilder().setName("Douglas").setCapacity(110).setLoad(1012)
- .setFlightDistance(13).setFuelPerHour(14).setBombs(13).createBomber();
+ Plane douglas = new BomberBuilder().setName("Douglas").setCapacity(110)
+ .setLoad(1012).setFlightDistance(13).setFuelPerHour(14)
+ .setBombs(13).createBomber();
- Plane messershmidt = new FighterBuilder().setName("Messershmidt").setCapacity(11).setLoad
- (130).setFlightDistance(15).setFuelPerHour(8).setBullets(1240).createFighter();
+ Plane messershmidt = new FighterBuilder().setName("Messershmidt")
+ .setCapacity(11).setLoad(130).setFlightDistance(15).
+ setFuelPerHour(8).setBullets(1240).createFighter();
- Plane la7 = new FighterBuilder().setName("La-7").setCapacity(13).setLoad(150)
- .setFlightDistance(13).setFuelPerHour(9).setBullets(800).createFighter();
+ Plane la7 = new FighterBuilder().setName("La-7").setCapacity(13)
+ .setLoad(150).setFlightDistance(13).setFuelPerHour(9)
+ .setBullets(800).createFighter();
- Plane boeing = new TransportPlaneBuilder().setName("Boeing").setCapacity(180).setLoad
- (1104).setFlightDistance(20).setFuelPerHour(20).setSeats(124).createTransportPlane();
+ Plane boeing = new TransportPlaneBuilder().setName("Boeing")
+ .setCapacity(180).setLoad(1104).setFlightDistance(20)
+ .setFuelPerHour(20).setSeats(124).createTransportPlane();
// 2. Create company with list of planes above
AirlineCompany aeroSvit = new AirlineCompany("AeroSvit", list(
heinkel, douglas, messershmidt, la7, boeing
));
// 3. Output some information about this company and its planes
System.out.println(aeroSvit);
System.out.println("Plane with this maximum flying distance - " +
aeroSvit.getMaxDistancePlane());
final int minFuelPerHour = 6;
final int maxFuelPerHour = 9;
- final Plane planeWithFuelBetween = aeroSvit.getPlaneWithFuelBetween(minFuelPerHour, maxFuelPerHour);
- System.out.print("Plane with fuel per hour usage between " + minFuelPerHour + " and " +
+ final Plane planeWithFuelBetween = aeroSvit.getPlaneWithFuelBetween
+ (minFuelPerHour, maxFuelPerHour);
+ System.out.print("Plane with fuel per hour usage between " +
+ minFuelPerHour + " and " +
maxFuelPerHour);
if (planeWithFuelBetween != null) {
System.out.println(" - " + planeWithFuelBetween);
} else {
System.out.println(" not found!");
}
}
}
| false | true | public static void main(String[] args) {
// 1. Create planes using builders
// name, capacity, load, flightDistance, fuelPerHour, units
Plane heinkel = new BomberBuilder().setName("Heinkel").setCapacity(100).setLoad(1000)
.setFlightDistance(10).setFuelPerHour(15).setBombs(14).createBomber();
Plane douglas = new BomberBuilder().setName("Douglas").setCapacity(110).setLoad(1012)
.setFlightDistance(13).setFuelPerHour(14).setBombs(13).createBomber();
Plane messershmidt = new FighterBuilder().setName("Messershmidt").setCapacity(11).setLoad
(130).setFlightDistance(15).setFuelPerHour(8).setBullets(1240).createFighter();
Plane la7 = new FighterBuilder().setName("La-7").setCapacity(13).setLoad(150)
.setFlightDistance(13).setFuelPerHour(9).setBullets(800).createFighter();
Plane boeing = new TransportPlaneBuilder().setName("Boeing").setCapacity(180).setLoad
(1104).setFlightDistance(20).setFuelPerHour(20).setSeats(124).createTransportPlane();
// 2. Create company with list of planes above
AirlineCompany aeroSvit = new AirlineCompany("AeroSvit", list(
heinkel, douglas, messershmidt, la7, boeing
));
// 3. Output some information about this company and its planes
System.out.println(aeroSvit);
System.out.println("Plane with this maximum flying distance - " +
aeroSvit.getMaxDistancePlane());
final int minFuelPerHour = 6;
final int maxFuelPerHour = 9;
final Plane planeWithFuelBetween = aeroSvit.getPlaneWithFuelBetween(minFuelPerHour, maxFuelPerHour);
System.out.print("Plane with fuel per hour usage between " + minFuelPerHour + " and " +
maxFuelPerHour);
if (planeWithFuelBetween != null) {
System.out.println(" - " + planeWithFuelBetween);
} else {
System.out.println(" not found!");
}
}
| public static void main(String[] args) {
// 1. Create planes using builders
// name, capacity, load, flightDistance, fuelPerHour, units
Plane heinkel = new BomberBuilder().setName("Heinkel").setCapacity(100)
.setLoad(1000).setFlightDistance(10).setFuelPerHour(15)
.setBombs(14).createBomber();
Plane douglas = new BomberBuilder().setName("Douglas").setCapacity(110)
.setLoad(1012).setFlightDistance(13).setFuelPerHour(14)
.setBombs(13).createBomber();
Plane messershmidt = new FighterBuilder().setName("Messershmidt")
.setCapacity(11).setLoad(130).setFlightDistance(15).
setFuelPerHour(8).setBullets(1240).createFighter();
Plane la7 = new FighterBuilder().setName("La-7").setCapacity(13)
.setLoad(150).setFlightDistance(13).setFuelPerHour(9)
.setBullets(800).createFighter();
Plane boeing = new TransportPlaneBuilder().setName("Boeing")
.setCapacity(180).setLoad(1104).setFlightDistance(20)
.setFuelPerHour(20).setSeats(124).createTransportPlane();
// 2. Create company with list of planes above
AirlineCompany aeroSvit = new AirlineCompany("AeroSvit", list(
heinkel, douglas, messershmidt, la7, boeing
));
// 3. Output some information about this company and its planes
System.out.println(aeroSvit);
System.out.println("Plane with this maximum flying distance - " +
aeroSvit.getMaxDistancePlane());
final int minFuelPerHour = 6;
final int maxFuelPerHour = 9;
final Plane planeWithFuelBetween = aeroSvit.getPlaneWithFuelBetween
(minFuelPerHour, maxFuelPerHour);
System.out.print("Plane with fuel per hour usage between " +
minFuelPerHour + " and " +
maxFuelPerHour);
if (planeWithFuelBetween != null) {
System.out.println(" - " + planeWithFuelBetween);
} else {
System.out.println(" not found!");
}
}
|
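
The lab3 row is a whitespace-only re-wrap of long builder chains; behaviour is unchanged, which is why its buggy and fixed columns differ only in line breaks. For reference, a minimal hypothetical Bomber/BomberBuilder pair consistent with the fluent calls in Main (the real lab3 classes are not shown):

// Hypothetical product type matching the calls in lab3.Main.
class Bomber {
    final String name;
    final int capacity, load, flightDistance, fuelPerHour, bombs;
    Bomber(String name, int capacity, int load, int flightDistance, int fuelPerHour, int bombs) {
        this.name = name; this.capacity = capacity; this.load = load;
        this.flightDistance = flightDistance; this.fuelPerHour = fuelPerHour; this.bombs = bombs;
    }
}

// Fluent builder: every setter returns this, createBomber() assembles the product.
class BomberBuilder {
    private String name;
    private int capacity, load, flightDistance, fuelPerHour, bombs;
    BomberBuilder setName(String v)        { name = v; return this; }
    BomberBuilder setCapacity(int v)       { capacity = v; return this; }
    BomberBuilder setLoad(int v)           { load = v; return this; }
    BomberBuilder setFlightDistance(int v) { flightDistance = v; return this; }
    BomberBuilder setFuelPerHour(int v)    { fuelPerHour = v; return this; }
    BomberBuilder setBombs(int v)          { bombs = v; return this; }
    Bomber createBomber() {
        return new Bomber(name, capacity, load, flightDistance, fuelPerHour, bombs);
    }
}
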
diff --git a/mmstudio/src/org/micromanager/acquisition/engine/ImageTask.java b/mmstudio/src/org/micromanager/acquisition/engine/ImageTask.java
index b41601c49..f5e4f75f2 100644
--- a/mmstudio/src/org/micromanager/acquisition/engine/ImageTask.java
+++ b/mmstudio/src/org/micromanager/acquisition/engine/ImageTask.java
@@ -1,329 +1,329 @@
/*
* To change this template, choose Tools | Templates
* and open the template in the editor.
*/
package org.micromanager.acquisition.engine;
import java.text.SimpleDateFormat;
import java.util.Date;
import org.micromanager.api.Autofocus;
import org.micromanager.api.EngineTask;
import java.util.HashMap;
import mmcorej.CMMCore;
import mmcorej.Configuration;
import mmcorej.TaggedImage;
import org.micromanager.navigation.MultiStagePosition;
import org.micromanager.navigation.StagePosition;
import org.micromanager.utils.JavaUtils;
import org.micromanager.utils.MDUtils;
import org.micromanager.utils.ReportingUtils;
/**
*
* @author arthur
*/
public class ImageTask implements EngineTask {
private final ImageRequest imageRequest_;
private final Engine eng_;
private final CMMCore core_;
private boolean stopRequested_;
private boolean pauseRequested_;
boolean setZPosition_ = false;
private final HashMap<String, String> md_;
private double zPosition_;
private final SimpleDateFormat iso8601modified;
ImageTask(Engine eng, ImageRequest imageRequest) {
eng_ = eng;
core_ = eng.core_;
imageRequest_ = imageRequest;
stopRequested_ = false;
md_ = new HashMap<String, String>();
iso8601modified = new SimpleDateFormat("yyyy-MM-dd E HH:mm:ss Z");
}
private void log(String msg) {
ReportingUtils.logMessage("ImageTask: " + msg);
}
public void run() {
if (!isStopRequested()) {
updateChannel();
}
if (!isStopRequested()) {
updatePosition();
}
if (!isStopRequested()) {
sleep();
}
if (!isStopRequested()) {
autofocus();
}
if (!isStopRequested()) {
updateSlice();
}
if (!isStopRequested()) {
acquireImage();
}
}
void updateChannel() {
if (imageRequest_.UseChannel) {
try {
core_.setExposure(imageRequest_.Channel.exposure_);
imageRequest_.exposure = imageRequest_.Channel.exposure_;
String chanGroup = imageRequest_.Channel.name_;
if (chanGroup.length() == 0) {
chanGroup = core_.getChannelGroup();
}
core_.setConfig(chanGroup, imageRequest_.Channel.config_);
core_.waitForConfig(chanGroup,imageRequest_.Channel.config_);
log("channel set");
} catch (Exception ex) {
ReportingUtils.logError(ex, "Channel setting failed.");
}
}
}
void updateSlice() {
try {
if (imageRequest_.UseSlice) {
setZPosition_ = true;
if (imageRequest_.relativeZSlices) {
zPosition_ += imageRequest_.SlicePosition;
System.out.println(zPosition_);
} else {
zPosition_ = imageRequest_.SlicePosition;
}
} else {
zPosition_ = core_.getPosition(core_.getFocusDevice());
}
if (imageRequest_.UseChannel) {
setZPosition_ = true;
zPosition_ += imageRequest_.Channel.zOffset_;
}
if (setZPosition_) {
imageRequest_.zPosition = zPosition_;
core_.setPosition(core_.getFocusDevice(), zPosition_);
core_.waitForDevice(core_.getFocusDevice());
}
} catch (Exception e) {
ReportingUtils.logError(e);
}
}
void updatePosition() {
try {
zPosition_ = imageRequest_.zReference;
if (imageRequest_.UsePosition) {
MultiStagePosition msp = imageRequest_.Position;
for (int i = 0; i < msp.size(); ++i) {
StagePosition sp = msp.get(i);
if (sp.numAxes == 1) {
if (sp.stageName.equals(core_.getFocusDevice())) {
zPosition_ = sp.x; // Surprisingly it should be sp.x!
setZPosition_ = true;
} else {
core_.setPosition(sp.stageName, sp.x);
core_.waitForDevice(sp.stageName);
MDUtils.put(md_,"Acquisition-"+sp.stageName+"RequestedZPosition", sp.x);
}
} else if (sp.numAxes == 2) {
core_.setXYPosition(sp.stageName, sp.x, sp.y);
core_.waitForDevice(sp.stageName);
MDUtils.put(md_,"Acquisition-"+sp.stageName+"RequestedXPosition", sp.x);
MDUtils.put(md_,"Acquisition-"+sp.stageName+"RequestedYPosition", sp.y);
}
log("position set\n");
}
}
core_.waitForDevice(core_.getFocusDevice());
} catch (Exception ex) {
ReportingUtils.logError(ex, "Set position failed.");
}
}
public synchronized void sleep() {
if (imageRequest_.UseFrame) {
while (!stopRequested_ && eng_.lastWakeTime_ > 0) {
double sleepTime = (eng_.lastWakeTime_ + imageRequest_.WaitTime) - (System.nanoTime() / 1000000);
if (sleepTime > 0) {
try {
wait((long) sleepTime);
} catch (InterruptedException ex) {
ReportingUtils.logError(ex);
}
} else {
if (imageRequest_.WaitTime > 0) {
MDUtils.put(md_, "Acquisition-TimingState", "Lagging");
}
break;
}
}
log("wait finished");
eng_.lastWakeTime_ = (System.nanoTime() / 1000000);
}
}
public void autofocus() {
String afResult = "AutofocusResult";
StagePosition sp;
Autofocus afDevice;
if (imageRequest_.AutoFocus) {
try {
String focusDevice = core_.getFocusDevice();
core_.setPosition(focusDevice, zPosition_);
core_.waitForDevice(focusDevice);
afDevice = eng_.getAutofocusManager().getDevice();
afDevice.fullFocus();
MDUtils.put(md_, afResult, "Success");
if (imageRequest_.UsePosition) {
sp = imageRequest_.Position.get(core_.getFocusDevice());
if (sp != null)
sp.x = core_.getPosition(core_.getFocusDevice());
}
zPosition_ = core_.getPosition(focusDevice);
core_.waitForDevice(focusDevice);
} catch (Exception ex) {
ReportingUtils.logError(ex);
MDUtils.put(md_,"AutofocusResult","Failure");
}
}
}
void acquireImage() {
//Gson gson = new Gson();
//String jsonMetadata = gson.toJson(imageRequest_);
waitDuringPause();
MDUtils.put(md_, "Slice", imageRequest_.SliceIndex);
if (imageRequest_.UseChannel) {
MDUtils.put(md_, "Channel", imageRequest_.Channel.config_);
}
MDUtils.put(md_, "PositionIndex", imageRequest_.PositionIndex);
MDUtils.put(md_, "ChannelIndex", imageRequest_.ChannelIndex);
MDUtils.put(md_, "Frame", imageRequest_.FrameIndex);
if (imageRequest_.UsePosition) {
MDUtils.put(md_, "PositionName", imageRequest_.Position.getLabel());
}
MDUtils.put(md_, "SlicePosition", imageRequest_.SlicePosition);
long bits = core_.getBytesPerPixel() * 8;
String lbl = "";
if (core_.getNumberOfComponents() == 1) {
lbl = "GRAY";
} else if (core_.getNumberOfComponents() == 4) {
lbl = "RGB";
}
MDUtils.put(md_, "Exposure-ms", imageRequest_.exposure);
MDUtils.put(md_, "PixelSizeUm", core_.getPixelSizeUm());
try {
MDUtils.put(md_, "ZPositionUm", core_.getPosition(core_.getFocusDevice()));
} catch (Exception ex) {
ReportingUtils.logError(ex);
MDUtils.put(md_, "ZPositionUm", "");
}
MDUtils.put(md_, "Image-PixelType", lbl + bits);
try {
MDUtils.setWidth(md_, (int) core_.getImageWidth());
MDUtils.setHeight(md_, (int) core_.getImageHeight());
} catch (Exception e) {
ReportingUtils.logError(e);
}
long dTime = System.nanoTime() - eng_.getStartTimeNs();
MDUtils.put(md_, "ElapsedTime-ms", ((double) dTime) / 1e9);
try {
core_.waitForDevice(core_.getShutterDevice());
if (core_.getAutoShutter())
core_.setAutoShutter(false);
if (eng_.autoShutterSelected_ && !core_.getShutterOpen()) {
core_.setShutterOpen(true);
log("opened shutter");
}
Object pixels;
if (!imageRequest_.collectBurst) {
core_.snapImage(); //Should be: core_.snapImage(jsonMetadata);
log("snapped image");
if (eng_.autoShutterSelected_ && imageRequest_.CloseShutter) {
core_.waitForDevice(core_.getShutterDevice());
core_.setShutterOpen(false);
log("closed shutter");
}
pixels = core_.getImage();
} else {
if (imageRequest_.startBurstN > 0) {
if (eng_.autoShutterSelected_)
core_.setAutoShutter(true);
core_.startSequenceAcquisition(imageRequest_.startBurstN,
0, false);
log("started a burst with " + imageRequest_.startBurstN + " images.");
}
while (core_.getRemainingImageCount() == 0)
JavaUtils.sleep(5);
pixels = core_.popNextImage();
log("collected burst image");
}
MDUtils.put(md_, "Source",core_.getCameraDevice());
Configuration config = core_.getSystemStateCache();
MDUtils.addConfiguration(md_, config);
if (imageRequest_.NextWaitTime > 0) {
long nextFrameTimeMs = (long) (imageRequest_.NextWaitTime + eng_.lastWakeTime_);
MDUtils.put(md_, "NextFrameTimeMs", nextFrameTimeMs);
}
MDUtils.addRandomUUID(md_);
- MDUtils.put("Time", iso8601modified.format(new Date()));
+ MDUtils.put(md_, "Time", iso8601modified.format(new Date()));
TaggedImage taggedImage = new TaggedImage(pixels, md_);
eng_.imageReceivingQueue_.add(taggedImage);
} catch (Exception ex) {
ReportingUtils.logError(ex);
}
}
public synchronized void requestStop() {
stopRequested_ = true;
notify();
}
public synchronized void requestPause() {
pauseRequested_ = true;
}
public synchronized void requestResume() {
pauseRequested_ = false;
this.notify();
}
private synchronized boolean isPauseRequested() {
return pauseRequested_;
}
private synchronized void waitDuringPause() {
try {
if (isPauseRequested()) {
wait();
}
} catch (InterruptedException ex) {
ReportingUtils.logError(ex);
}
}
private synchronized boolean isStopRequested() {
return stopRequested_;
}
}
| true | true | void acquireImage() {
//Gson gson = new Gson();
//String jsonMetadata = gson.toJson(imageRequest_);
waitDuringPause();
MDUtils.put(md_, "Slice", imageRequest_.SliceIndex);
if (imageRequest_.UseChannel) {
MDUtils.put(md_, "Channel", imageRequest_.Channel.config_);
}
MDUtils.put(md_, "PositionIndex", imageRequest_.PositionIndex);
MDUtils.put(md_, "ChannelIndex", imageRequest_.ChannelIndex);
MDUtils.put(md_, "Frame", imageRequest_.FrameIndex);
if (imageRequest_.UsePosition) {
MDUtils.put(md_, "PositionName", imageRequest_.Position.getLabel());
}
MDUtils.put(md_, "SlicePosition", imageRequest_.SlicePosition);
long bits = core_.getBytesPerPixel() * 8;
String lbl = "";
if (core_.getNumberOfComponents() == 1) {
lbl = "GRAY";
} else if (core_.getNumberOfComponents() == 4) {
lbl = "RGB";
}
MDUtils.put(md_, "Exposure-ms", imageRequest_.exposure);
MDUtils.put(md_, "PixelSizeUm", core_.getPixelSizeUm());
try {
MDUtils.put(md_, "ZPositionUm", core_.getPosition(core_.getFocusDevice()));
} catch (Exception ex) {
ReportingUtils.logError(ex);
MDUtils.put(md_, "ZPositionUm", "");
}
MDUtils.put(md_, "Image-PixelType", lbl + bits);
try {
MDUtils.setWidth(md_, (int) core_.getImageWidth());
MDUtils.setHeight(md_, (int) core_.getImageHeight());
} catch (Exception e) {
ReportingUtils.logError(e);
}
long dTime = System.nanoTime() - eng_.getStartTimeNs();
MDUtils.put(md_, "ElapsedTime-ms", ((double) dTime) / 1e9);
try {
core_.waitForDevice(core_.getShutterDevice());
if (core_.getAutoShutter())
core_.setAutoShutter(false);
if (eng_.autoShutterSelected_ && !core_.getShutterOpen()) {
core_.setShutterOpen(true);
log("opened shutter");
}
Object pixels;
if (!imageRequest_.collectBurst) {
core_.snapImage(); //Should be: core_.snapImage(jsonMetadata);
log("snapped image");
if (eng_.autoShutterSelected_ && imageRequest_.CloseShutter) {
core_.waitForDevice(core_.getShutterDevice());
core_.setShutterOpen(false);
log("closed shutter");
}
pixels = core_.getImage();
} else {
if (imageRequest_.startBurstN > 0) {
if (eng_.autoShutterSelected_)
core_.setAutoShutter(true);
core_.startSequenceAcquisition(imageRequest_.startBurstN,
0, false);
log("started a burst with " + imageRequest_.startBurstN + " images.");
}
while (core_.getRemainingImageCount() == 0)
JavaUtils.sleep(5);
pixels = core_.popNextImage();
log("collected burst image");
}
MDUtils.put(md_, "Source",core_.getCameraDevice());
Configuration config = core_.getSystemStateCache();
MDUtils.addConfiguration(md_, config);
if (imageRequest_.NextWaitTime > 0) {
long nextFrameTimeMs = (long) (imageRequest_.NextWaitTime + eng_.lastWakeTime_);
MDUtils.put(md_, "NextFrameTimeMs", nextFrameTimeMs);
}
MDUtils.addRandomUUID(md_);
MDUtils.put("Time", iso8601modified.format(new Date()));
TaggedImage taggedImage = new TaggedImage(pixels, md_);
eng_.imageReceivingQueue_.add(taggedImage);
} catch (Exception ex) {
ReportingUtils.logError(ex);
}
}
| void acquireImage() {
//Gson gson = new Gson();
//String jsonMetadata = gson.toJson(imageRequest_);
waitDuringPause();
MDUtils.put(md_, "Slice", imageRequest_.SliceIndex);
if (imageRequest_.UseChannel) {
MDUtils.put(md_, "Channel", imageRequest_.Channel.config_);
}
MDUtils.put(md_, "PositionIndex", imageRequest_.PositionIndex);
MDUtils.put(md_, "ChannelIndex", imageRequest_.ChannelIndex);
MDUtils.put(md_, "Frame", imageRequest_.FrameIndex);
if (imageRequest_.UsePosition) {
MDUtils.put(md_, "PositionName", imageRequest_.Position.getLabel());
}
MDUtils.put(md_, "SlicePosition", imageRequest_.SlicePosition);
long bits = core_.getBytesPerPixel() * 8;
String lbl = "";
if (core_.getNumberOfComponents() == 1) {
lbl = "GRAY";
} else if (core_.getNumberOfComponents() == 4) {
lbl = "RGB";
}
MDUtils.put(md_, "Exposure-ms", imageRequest_.exposure);
MDUtils.put(md_, "PixelSizeUm", core_.getPixelSizeUm());
try {
MDUtils.put(md_, "ZPositionUm", core_.getPosition(core_.getFocusDevice()));
} catch (Exception ex) {
ReportingUtils.logError(ex);
MDUtils.put(md_, "ZPositionUm", "");
}
MDUtils.put(md_, "Image-PixelType", lbl + bits);
try {
MDUtils.setWidth(md_, (int) core_.getImageWidth());
MDUtils.setHeight(md_, (int) core_.getImageHeight());
} catch (Exception e) {
ReportingUtils.logError(e);
}
long dTime = System.nanoTime() - eng_.getStartTimeNs();
MDUtils.put(md_, "ElapsedTime-ms", ((double) dTime) / 1e9);
try {
core_.waitForDevice(core_.getShutterDevice());
if (core_.getAutoShutter())
core_.setAutoShutter(false);
if (eng_.autoShutterSelected_ && !core_.getShutterOpen()) {
core_.setShutterOpen(true);
log("opened shutter");
}
Object pixels;
if (!imageRequest_.collectBurst) {
core_.snapImage(); //Should be: core_.snapImage(jsonMetadata);
log("snapped image");
if (eng_.autoShutterSelected_ && imageRequest_.CloseShutter) {
core_.waitForDevice(core_.getShutterDevice());
core_.setShutterOpen(false);
log("closed shutter");
}
pixels = core_.getImage();
} else {
if (imageRequest_.startBurstN > 0) {
if (eng_.autoShutterSelected_)
core_.setAutoShutter(true);
core_.startSequenceAcquisition(imageRequest_.startBurstN,
0, false);
log("started a burst with " + imageRequest_.startBurstN + " images.");
}
while (core_.getRemainingImageCount() == 0)
JavaUtils.sleep(5);
pixels = core_.popNextImage();
log("collected burst image");
}
MDUtils.put(md_, "Source",core_.getCameraDevice());
Configuration config = core_.getSystemStateCache();
MDUtils.addConfiguration(md_, config);
if (imageRequest_.NextWaitTime > 0) {
long nextFrameTimeMs = (long) (imageRequest_.NextWaitTime + eng_.lastWakeTime_);
MDUtils.put(md_, "NextFrameTimeMs", nextFrameTimeMs);
}
MDUtils.addRandomUUID(md_);
MDUtils.put(md_, "Time", iso8601modified.format(new Date()));
TaggedImage taggedImage = new TaggedImage(pixels, md_);
eng_.imageReceivingQueue_.add(taggedImage);
} catch (Exception ex) {
ReportingUtils.logError(ex);
}
}
|
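The one-line fix in the record above swaps MDUtils.put("Time", ...) for MDUtils.put(md_, "Time", ...): the buggy call dropped the destination metadata map, so the "Time" tag never reached the per-image metadata. A minimal sketch of that bug pattern follows; it assumes, hypothetically (this is not the real MDUtils API), a helper with both a two-argument overload writing to a shared default map and a three-argument overload writing to the caller's map, so the buggy call still compiles.

import java.util.HashMap;
import java.util.Map;

// Hypothetical reduction of the bug fixed above: with both overloads present,
// dropping the map argument still compiles, but the "Time" tag silently lands
// in the wrong map instead of the per-image metadata.
public class PutOverloadSketch {

    private static final Map<String, Object> DEFAULT = new HashMap<>();

    static void put(String key, Object value) {                          // overload A
        DEFAULT.put(key, value);
    }

    static void put(Map<String, Object> md, String key, Object value) {  // overload B
        md.put(key, value);
    }

    public static void main(String[] args) {
        Map<String, Object> md = new HashMap<>();
        put("Time", "2009-06-01T12:00:00");      // buggy shape: writes to DEFAULT
        put(md, "Time", "2009-06-01T12:00:00");  // fixed shape: writes to md
        System.out.println(md.containsKey("Time")); // true only for the fixed call
    }
}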
diff --git a/src/callgraphanalyzer/NetworkBuilder.java b/src/callgraphanalyzer/NetworkBuilder.java
index e8ec24a..a028098 100644
--- a/src/callgraphanalyzer/NetworkBuilder.java
+++ b/src/callgraphanalyzer/NetworkBuilder.java
@@ -1,97 +1,101 @@
package callgraphanalyzer;
import java.util.List;
import models.Commit;
import models.CommitTree;
import models.Node;
import db.CallGraphDb;
public class NetworkBuilder
{
private Comparator compare;
private CallGraphAnalyzer cga;
private CallGraphDb db;
private CommitTree ct;
private String startCommit;
private String endCommit;
public NetworkBuilder(CallGraphDb db, CommitTree ct, String startCommit)
{
super();
this.db = db;
this.ct = ct;
this.startCommit = startCommit;
// Initialize both CGs to the start commit for updating
compare = new Comparator(db, startCommit, startCommit);
}
public void buildAllNetworks() {
traverseCommitTree(this.ct.getRoot());
}
private void traverseCommitTree(Node parent) {
while(parent != null) {
List<Node> children = parent.getChildren();
if(children.size() == 1) {
if(!isMergeCommit(children.get(0).getCommitID())) {
compare.newCallGraph = compare.forwardUpdateCallGraph(compare.newCallGraph, children.get(0).getCommitID());
buildNetwork(parent.getCommitID(), children.get(0).getCommitID());
compare.oldCallGraph = compare.forwardUpdateCallGraph(compare.oldCallGraph, children.get(0).getCommitID());
}
+ else {
+ compare.newCallGraph = compare.forwardUpdateCallGraph(compare.newCallGraph, children.get(0).getCommitID());
+ compare.oldCallGraph = compare.forwardUpdateCallGraph(compare.oldCallGraph, children.get(0).getCommitID());
+ }
parent = children.get(0);
}
else if(children.size() > 1) {
for(Node child: children) {
if(!isMergeCommit(child.getCommitID())) {
compare.newCallGraph = compare.forwardUpdateCallGraph(compare.newCallGraph, child.getCommitID());
buildNetwork(parent.getCommitID(), child.getCommitID());
compare.oldCallGraph = compare.forwardUpdateCallGraph(compare.oldCallGraph, child.getCommitID());
}
else {
compare.newCallGraph = compare.forwardUpdateCallGraph(compare.newCallGraph, child.getCommitID());
compare.oldCallGraph = compare.forwardUpdateCallGraph(compare.oldCallGraph, child.getCommitID());
}
traverseCommitTree(child);
//Restore
compare.newCallGraph = compare.batchReverseUpdate(compare.newCallGraph, this.ct, parent.getCommitID());
compare.oldCallGraph = compare.batchReverseUpdate(compare.oldCallGraph, this.ct, parent.getCommitID());
}
parent = null;
}
else {
parent = null;
}
}
}
private boolean isMergeCommit(String commitID) {
List<Commit> parents = db.getCommitParents(commitID);
return parents.size() > 1;
}
private void buildNetwork(String initial, String change) {
System.out.println("Generating network for: " + initial + " - " + change);
//compare = new Comparator(db, initial, change);
cga = new CallGraphAnalyzer();
// Update the comparator with the commits
compare.updateCGVariables(initial, change);
System.out.println("Comparing Commits...");
compare.CompareCommits(initial, change);
cga.init(compare);
System.out.println("Generating the relationships...");
cga.generateRelationships();
cga.exportRelations();
System.out.println();
}
}
| true | true | private void traverseCommitTree(Node parent) {
while(parent != null) {
List<Node> children = parent.getChildren();
if(children.size() == 1) {
if(!isMergeCommit(children.get(0).getCommitID())) {
compare.newCallGraph = compare.forwardUpdateCallGraph(compare.newCallGraph, children.get(0).getCommitID());
buildNetwork(parent.getCommitID(), children.get(0).getCommitID());
compare.oldCallGraph = compare.forwardUpdateCallGraph(compare.oldCallGraph, children.get(0).getCommitID());
}
parent = children.get(0);
}
else if(children.size() > 1) {
for(Node child: children) {
if(!isMergeCommit(child.getCommitID())) {
compare.newCallGraph = compare.forwardUpdateCallGraph(compare.newCallGraph, child.getCommitID());
buildNetwork(parent.getCommitID(), child.getCommitID());
compare.oldCallGraph = compare.forwardUpdateCallGraph(compare.oldCallGraph, child.getCommitID());
}
else {
compare.newCallGraph = compare.forwardUpdateCallGraph(compare.newCallGraph, child.getCommitID());
compare.oldCallGraph = compare.forwardUpdateCallGraph(compare.oldCallGraph, child.getCommitID());
}
traverseCommitTree(child);
//Restore
compare.newCallGraph = compare.batchReverseUpdate(compare.newCallGraph, this.ct, parent.getCommitID());
compare.oldCallGraph = compare.batchReverseUpdate(compare.oldCallGraph, this.ct, parent.getCommitID());
}
parent = null;
}
else {
parent = null;
}
}
}
| private void traverseCommitTree(Node parent) {
while(parent != null) {
List<Node> children = parent.getChildren();
if(children.size() == 1) {
if(!isMergeCommit(children.get(0).getCommitID())) {
compare.newCallGraph = compare.forwardUpdateCallGraph(compare.newCallGraph, children.get(0).getCommitID());
buildNetwork(parent.getCommitID(), children.get(0).getCommitID());
compare.oldCallGraph = compare.forwardUpdateCallGraph(compare.oldCallGraph, children.get(0).getCommitID());
}
else {
compare.newCallGraph = compare.forwardUpdateCallGraph(compare.newCallGraph, children.get(0).getCommitID());
compare.oldCallGraph = compare.forwardUpdateCallGraph(compare.oldCallGraph, children.get(0).getCommitID());
}
parent = children.get(0);
}
else if(children.size() > 1) {
for(Node child: children) {
if(!isMergeCommit(child.getCommitID())) {
compare.newCallGraph = compare.forwardUpdateCallGraph(compare.newCallGraph, child.getCommitID());
buildNetwork(parent.getCommitID(), child.getCommitID());
compare.oldCallGraph = compare.forwardUpdateCallGraph(compare.oldCallGraph, child.getCommitID());
}
else {
compare.newCallGraph = compare.forwardUpdateCallGraph(compare.newCallGraph, child.getCommitID());
compare.oldCallGraph = compare.forwardUpdateCallGraph(compare.oldCallGraph, child.getCommitID());
}
traverseCommitTree(child);
//Restore
compare.newCallGraph = compare.batchReverseUpdate(compare.newCallGraph, this.ct, parent.getCommitID());
compare.oldCallGraph = compare.batchReverseUpdate(compare.oldCallGraph, this.ct, parent.getCommitID());
}
parent = null;
}
else {
parent = null;
}
}
}
|
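The fix above restores an invariant in the single-child branch of traverseCommitTree: merge commits are skipped for network building, but both call graphs must still be advanced past them, exactly as the multi-child branch already did. A minimal sketch of that two-cursor invariant (names are illustrative, not the project's API):

import java.util.List;

// Two cursors walking the same commit chain must advance together even on
// commits that are skipped for analysis; otherwise every later comparison
// diffs against a stale snapshot.
public class CursorSyncSketch {

    static boolean isMerge(String commit) {
        return commit.startsWith("merge");
    }

    static String apply(String state, String commit) {
        return state + "->" + commit;
    }

    static void analyze(String before, String after) {
        System.out.println("compare " + before + " vs " + after);
    }

    static void walk(List<String> chain) {
        String newState = "root";
        String oldState = "root";
        for (String commit : chain) {
            if (!isMerge(commit)) {
                newState = apply(newState, commit);
                analyze(oldState, newState);
                oldState = apply(oldState, commit);
            } else {
                // the fix: still advance both cursors, just without analyzing
                newState = apply(newState, commit);
                oldState = apply(oldState, commit);
            }
        }
    }

    public static void main(String[] args) {
        walk(List.of("c1", "merge-c2", "c3"));
    }
}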
diff --git a/asakusa-test-driver/src/main/java/com/asakusafw/testdriver/TestDriverTestToolsBase.java b/asakusa-test-driver/src/main/java/com/asakusafw/testdriver/TestDriverTestToolsBase.java
index d4e0a2f5d..36ccf9f1f 100644
--- a/asakusa-test-driver/src/main/java/com/asakusafw/testdriver/TestDriverTestToolsBase.java
+++ b/asakusa-test-driver/src/main/java/com/asakusafw/testdriver/TestDriverTestToolsBase.java
@@ -1,66 +1,66 @@
package com.asakusafw.testdriver;
import java.io.File;
import java.io.IOException;
import java.util.List;
import com.asakusafw.testtools.TestUtils;
/**
 * Base class for test drivers implemented using the APIs provided by asakusa-test-tools.
*
*/
public class TestDriverTestToolsBase extends TestDriverBase {
    /** Test data generation and verification tool object. */
protected TestUtils testUtils;
    /** List of test data definition sheet files passed when creating the TestUtils instance. */
protected List<File> testDataFileList;
    /** Directory of test data definition sheets passed when creating TestUtils (mutually exclusive with testDataFileList). */
protected File testDataDir;
/**
     * Constructor.
     *
     * @throws RuntimeException if initialization fails
*/
public TestDriverTestToolsBase() {
super(null);
}
/**
     * Constructor.
     *
     * @param testDataFileList list of File objects giving the paths of the test data definition sheets
     * @throws RuntimeException if initialization fails
*/
public TestDriverTestToolsBase(List<File> testDataFileList) {
super(null);
this.testDataFileList = testDataFileList;
}
@Override
protected void initialize() {
super.initialize();
try {
- System.setProperty("ASAKUSA_TESTTOOLS_CONF", buildProperties.getProperty("asakusa.testtools.conf"));
+ System.setProperty("ASAKUSA_TESTTOOLS_CONF", buildProperties.getProperty("asakusa.jdbc.conf"));
System.setProperty("ASAKUSA_TEMPLATEGEN_OUTPUT_DIR", buildProperties.getProperty("asakusa.testdatasheet.output"));
String testDataDirPath = buildProperties.getProperty("asakusa.testdriver.testdata.dir");
if (testDataDirPath == null) {
testDataDirPath = TestDriverBase.TESTDATA_DIR_DEFAULT;
}
if (testDataFileList == null) {
testDataDir = new File(testDataDirPath + System.getProperty("file.separator")
+ driverContext.getClassName() + System.getProperty("file.separator")
+ driverContext.getMethodName());
testUtils = new TestUtils(testDataDir);
} else {
testUtils = new TestUtils(testDataFileList);
}
} catch (IOException e) {
throw new RuntimeException(e);
}
}
}
| true | true | protected void initialize() {
super.initialize();
try {
System.setProperty("ASAKUSA_TESTTOOLS_CONF", buildProperties.getProperty("asakusa.testtools.conf"));
System.setProperty("ASAKUSA_TEMPLATEGEN_OUTPUT_DIR", buildProperties.getProperty("asakusa.testdatasheet.output"));
String testDataDirPath = buildProperties.getProperty("asakusa.testdriver.testdata.dir");
if (testDataDirPath == null) {
testDataDirPath = TestDriverBase.TESTDATA_DIR_DEFAULT;
}
if (testDataFileList == null) {
testDataDir = new File(testDataDirPath + System.getProperty("file.separator")
+ driverContext.getClassName() + System.getProperty("file.separator")
+ driverContext.getMethodName());
testUtils = new TestUtils(testDataDir);
} else {
testUtils = new TestUtils(testDataFileList);
}
} catch (IOException e) {
throw new RuntimeException(e);
}
}
| protected void initialize() {
super.initialize();
try {
System.setProperty("ASAKUSA_TESTTOOLS_CONF", buildProperties.getProperty("asakusa.jdbc.conf"));
System.setProperty("ASAKUSA_TEMPLATEGEN_OUTPUT_DIR", buildProperties.getProperty("asakusa.testdatasheet.output"));
String testDataDirPath = buildProperties.getProperty("asakusa.testdriver.testdata.dir");
if (testDataDirPath == null) {
testDataDirPath = TestDriverBase.TESTDATA_DIR_DEFAULT;
}
if (testDataFileList == null) {
testDataDir = new File(testDataDirPath + System.getProperty("file.separator")
+ driverContext.getClassName() + System.getProperty("file.separator")
+ driverContext.getMethodName());
testUtils = new TestUtils(testDataDir);
} else {
testUtils = new TestUtils(testDataFileList);
}
} catch (IOException e) {
throw new RuntimeException(e);
}
}
|
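The fix above replaces a stale build-property key ("asakusa.testtools.conf") with the current one ("asakusa.jdbc.conf"). The failure mode is worth noting: Properties.getProperty returns null for a missing key, and System.setProperty rejects null values, so the stale key surfaced as a NullPointerException inside initialize(). A hedged sketch of a guard that fails with a readable message instead (a hypothetical helper, not part of the test driver):

import java.util.Properties;

public class PropertyGuardSketch {

    // Copy a required build property into a system property, failing loudly
    // when it is absent; System.setProperty(key, null) would instead throw
    // an unhelpful NullPointerException.
    static void setRequired(Properties build, String sysKey, String buildKey) {
        String value = build.getProperty(buildKey);
        if (value == null) {
            throw new IllegalStateException("missing build property: " + buildKey);
        }
        System.setProperty(sysKey, value);
    }

    public static void main(String[] args) {
        Properties build = new Properties();
        build.setProperty("asakusa.jdbc.conf", "/path/to/jdbc.properties");
        setRequired(build, "ASAKUSA_TESTTOOLS_CONF", "asakusa.jdbc.conf");
        System.out.println(System.getProperty("ASAKUSA_TESTTOOLS_CONF"));
    }
}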
diff --git a/nuxeo-core-storage-sql/nuxeo-core-storage-sql/src/main/java/org/nuxeo/ecm/core/storage/sql/jdbc/dialect/DialectMySQL.java b/nuxeo-core-storage-sql/nuxeo-core-storage-sql/src/main/java/org/nuxeo/ecm/core/storage/sql/jdbc/dialect/DialectMySQL.java
index 7c98cf221..87ea3721d 100644
--- a/nuxeo-core-storage-sql/nuxeo-core-storage-sql/src/main/java/org/nuxeo/ecm/core/storage/sql/jdbc/dialect/DialectMySQL.java
+++ b/nuxeo-core-storage-sql/nuxeo-core-storage-sql/src/main/java/org/nuxeo/ecm/core/storage/sql/jdbc/dialect/DialectMySQL.java
@@ -1,426 +1,426 @@
/*
* (C) Copyright 2008-2009 Nuxeo SA (http://nuxeo.com/) and contributors.
*
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the GNU Lesser General Public License
* (LGPL) version 2.1 which accompanies this distribution, and is available at
* http://www.gnu.org/licenses/lgpl.html
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* Contributors:
* Florent Guillaume
*/
package org.nuxeo.ecm.core.storage.sql.jdbc.dialect;
import java.io.Serializable;
import java.net.SocketException;
import java.sql.DatabaseMetaData;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Timestamp;
import java.sql.Types;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.GregorianCalendar;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import org.nuxeo.common.utils.StringUtils;
import org.nuxeo.ecm.core.storage.StorageException;
import org.nuxeo.ecm.core.storage.sql.Binary;
import org.nuxeo.ecm.core.storage.sql.BinaryManager;
import org.nuxeo.ecm.core.storage.sql.ColumnType;
import org.nuxeo.ecm.core.storage.sql.Model;
import org.nuxeo.ecm.core.storage.sql.RepositoryDescriptor;
import org.nuxeo.ecm.core.storage.sql.jdbc.db.Column;
import org.nuxeo.ecm.core.storage.sql.jdbc.db.Database;
import org.nuxeo.ecm.core.storage.sql.jdbc.db.Table;
/**
* MySQL-specific dialect.
*
* @author Florent Guillaume
*/
public class DialectMySQL extends Dialect {
public DialectMySQL(DatabaseMetaData metadata, BinaryManager binaryManager,
RepositoryDescriptor repositoryDescriptor) throws StorageException {
super(metadata, binaryManager, repositoryDescriptor);
}
@Override
public char openQuote() {
return '`';
}
@Override
public char closeQuote() {
return '`';
}
@Override
public String getAddForeignKeyConstraintString(String constraintName,
String[] foreignKeys, String referencedTable, String[] primaryKeys,
boolean referencesPrimaryKey) {
String cols = StringUtils.join(foreignKeys, ", ");
String sql = String.format(
" ADD INDEX %s (%s), ADD CONSTRAINT %s FOREIGN KEY (%s) REFERENCES %s (%s)",
constraintName, cols, constraintName, cols, referencedTable,
StringUtils.join(primaryKeys, ", "));
return sql;
}
@Override
public boolean qualifyIndexName() {
return false;
}
@Override
public boolean supportsIfExistsBeforeTableName() {
return true;
}
@Override
public JDBCInfo getJDBCTypeAndString(ColumnType type) {
switch (type) {
case VARCHAR:
// don't use the max 65535 because this max is actually for the
// total size of all columns of a given table, so allow several
// varchar columns in the same table
return jdbcInfo("VARCHAR(500)", Types.VARCHAR);
case CLOB:
return jdbcInfo("LONGTEXT", Types.LONGVARCHAR);
case BOOLEAN:
return jdbcInfo("BIT", Types.BIT);
case LONG:
return jdbcInfo("BIGINT", Types.BIGINT);
case DOUBLE:
return jdbcInfo("DOUBLE", Types.DOUBLE);
case TIMESTAMP:
return jdbcInfo("DATETIME", Types.TIMESTAMP);
case BLOBID:
- return jdbcInfo("VARCHAR(40)", Types.VARCHAR);
+ return jdbcInfo("VARCHAR(40) BINARY", Types.VARCHAR);
// -----
case NODEID:
case NODEIDFK:
case NODEIDFKNP:
case NODEIDFKMUL:
case NODEIDFKNULL:
case NODEVAL:
- return jdbcInfo("VARCHAR(36)", Types.VARCHAR);
+ return jdbcInfo("VARCHAR(36) BINARY", Types.VARCHAR);
case SYSNAME:
return jdbcInfo("VARCHAR(256) BINARY", Types.VARCHAR);
case TINYINT:
return jdbcInfo("TINYINT", Types.TINYINT);
case INTEGER:
return jdbcInfo("INTEGER", Types.INTEGER);
case FTINDEXED:
throw new AssertionError(type);
case FTSTORED:
return jdbcInfo("LONGTEXT", Types.LONGVARCHAR);
case CLUSTERNODE:
return jdbcInfo("BIGINT", Types.BIGINT);
case CLUSTERFRAGS:
return jdbcInfo("TEXT", Types.VARCHAR);
}
throw new AssertionError(type);
}
@Override
public boolean isAllowedConversion(int expected, int actual,
String actualName, int actualSize) {
// LONGVARCHAR vs VARCHAR compatibility
if (expected == Types.VARCHAR && actual == Types.LONGVARCHAR) {
return true;
}
if (expected == Types.LONGVARCHAR && actual == Types.VARCHAR) {
return true;
}
// INTEGER vs BIGINT compatibility
if (expected == Types.BIGINT && actual == Types.INTEGER) {
return true;
}
if (expected == Types.INTEGER && actual == Types.BIGINT) {
return true;
}
return false;
}
@Override
public void setToPreparedStatement(PreparedStatement ps, int index,
Serializable value, Column column) throws SQLException {
switch (column.getJdbcType()) {
case Types.VARCHAR:
case Types.LONGVARCHAR:
String v;
if (column.getType() == ColumnType.BLOBID) {
v = ((Binary) value).getDigest();
} else {
v = (String) value;
}
ps.setString(index, v);
break;
case Types.BIT:
ps.setBoolean(index, ((Boolean) value).booleanValue());
return;
case Types.TINYINT:
case Types.INTEGER:
case Types.BIGINT:
ps.setLong(index, ((Long) value).longValue());
return;
case Types.DOUBLE:
ps.setDouble(index, ((Double) value).doubleValue());
return;
case Types.TIMESTAMP:
Calendar cal = (Calendar) value;
Timestamp ts = new Timestamp(cal.getTimeInMillis());
ps.setTimestamp(index, ts, cal); // cal passed for timezone
return;
default:
throw new SQLException("Unhandled JDBC type: "
+ column.getJdbcType());
}
}
@Override
@SuppressWarnings("boxing")
public Serializable getFromResultSet(ResultSet rs, int index, Column column)
throws SQLException {
switch (column.getJdbcType()) {
case Types.VARCHAR:
case Types.LONGVARCHAR:
String string = rs.getString(index);
if (column.getType() == ColumnType.BLOBID && string != null) {
return getBinaryManager().getBinary(string);
} else {
return string;
}
case Types.BIT:
return rs.getBoolean(index);
case Types.TINYINT:
case Types.INTEGER:
case Types.BIGINT:
return rs.getLong(index);
case Types.DOUBLE:
return rs.getDouble(index);
case Types.TIMESTAMP:
Timestamp ts = rs.getTimestamp(index);
if (ts == null) {
return null;
} else {
Serializable cal = new GregorianCalendar(); // XXX timezone
((Calendar) cal).setTimeInMillis(ts.getTime());
return cal;
}
}
throw new SQLException("Unhandled JDBC type: " + column.getJdbcType());
}
@Override
public String getCreateFulltextIndexSql(String indexName,
String quotedIndexName, Table table, List<Column> columns,
Model model) {
List<String> columnNames = new ArrayList<String>(columns.size());
for (Column col : columns) {
columnNames.add(col.getQuotedName());
}
return String.format("CREATE FULLTEXT INDEX %s ON %s (%s)",
quotedIndexName, table.getQuotedName(), StringUtils.join(
columnNames, ", "));
}
@Override
public String getDialectFulltextQuery(String query) {
query = query.replaceAll(" +", " ");
List<String> pos = new LinkedList<String>();
List<String> neg = new LinkedList<String>();
for (String word : StringUtils.split(query, ' ', false)) {
if (word.startsWith("-")) {
neg.add(word);
} else if (word.startsWith("+")) {
pos.add(word);
} else {
pos.add("+" + word);
}
}
if (pos.isEmpty()) {
return "+DONTMATCHANYTHINGFOREMPTYQUERY";
}
String res = StringUtils.join(pos, " ");
if (!neg.isEmpty()) {
res += " " + StringUtils.join(neg, " ");
}
return res;
}
// SELECT ..., (MATCH(`fulltext`.`simpletext`, `fulltext`.`binarytext`)
// .................. AGAINST (?) / 10) AS nxscore
    // FROM ... LEFT JOIN `fulltext` ON `fulltext`.`id` = `hierarchy`.`id`
// WHERE ... AND MATCH(`fulltext`.`simpletext`, `fulltext`.`binarytext`)
// ................... AGAINST (? IN BOOLEAN MODE)
// ORDER BY nxscore DESC
@Override
public FulltextMatchInfo getFulltextScoredMatchInfo(String fulltextQuery,
String indexName, int nthMatch, Column mainColumn, Model model,
Database database) {
String nthSuffix = nthMatch == 1 ? "" : String.valueOf(nthMatch);
String scoreAlias = "_nxscore" + nthSuffix;
String indexSuffix = model.getFulltextIndexSuffix(indexName);
Table ft = database.getTable(model.FULLTEXT_TABLE_NAME);
Column ftMain = ft.getColumn(model.MAIN_KEY);
Column stColumn = ft.getColumn(model.FULLTEXT_SIMPLETEXT_KEY
+ indexSuffix);
Column btColumn = ft.getColumn(model.FULLTEXT_BINARYTEXT_KEY
+ indexSuffix);
String match = String.format("MATCH (%s, %s)",
stColumn.getFullQuotedName(), btColumn.getFullQuotedName());
FulltextMatchInfo info = new FulltextMatchInfo();
info.join = String.format(
"%s ON %s = %s", //
ft.getQuotedName(), ftMain.getFullQuotedName(),
mainColumn.getFullQuotedName());
info.whereExpr = String.format("%s AGAINST (? IN BOOLEAN MODE)", match);
info.whereExprParam = fulltextQuery;
// Note: using the boolean query in non-boolean mode gives approximate
// results but it's the best we have as MySQL does not provide a score
// in boolean mode.
// Note: dividing by 10 is arbitrary, but MySQL cannot really
// normalize scores.
info.scoreExpr = String.format("(%s AGAINST (?) / 10) AS %s", match,
scoreAlias);
info.scoreExprParam = fulltextQuery;
info.scoreAlias = scoreAlias;
info.scoreCol = new Column(mainColumn.getTable(), null,
ColumnType.DOUBLE, null);
return info;
}
@Override
public boolean getMaterializeFulltextSyntheticColumn() {
return false;
}
@Override
public int getFulltextIndexedColumns() {
return 2;
}
@Override
public String getTableTypeString(Table table) {
if (table.hasFulltextIndex()) {
return " ENGINE=MyISAM";
} else {
return " ENGINE=InnoDB";
}
}
@Override
public boolean supportsUpdateFrom() {
return true;
}
@Override
public boolean doesUpdateFromRepeatSelf() {
return true;
}
@Override
public boolean needsOrderByKeysAfterDistinct() {
return false;
}
@Override
public boolean needsAliasForDerivedTable() {
return true;
}
@Override
public String getSecurityCheckSql(String idColumnName) {
return String.format("NX_ACCESS_ALLOWED(%s, ?, ?)", idColumnName);
}
@Override
public String getInTreeSql(String idColumnName) {
return String.format("NX_IN_TREE(%s, ?)", idColumnName);
}
@Override
public String getSQLStatementsFilename() {
return "nuxeovcs/mysql.sql.txt";
}
@Override
public Map<String, Serializable> getSQLStatementsProperties(Model model,
Database database) {
Map<String, Serializable> properties = new HashMap<String, Serializable>();
properties.put("idType", "varchar(36)");
properties.put("fulltextEnabled", Boolean.valueOf(!fulltextDisabled));
return properties;
}
@Override
public boolean connectionClosedByException(Throwable t) {
while (t.getCause() != null) {
t = t.getCause();
}
if (t instanceof SocketException) {
return true;
}
// XAResource.start:
// com.mysql.jdbc.jdbc2.optional.MysqlXAException
// No operations allowed after connection closed. Connection was
// implicitly closed due to underlying exception/error:
// com.mysql.jdbc.exceptions.jdbc4.CommunicationsException:
// Communications link failure
String message = t.toString() + " " + t.getMessage();
if (message.contains("Communications link failure")
|| message.contains("CommunicationsException")) {
return true;
}
return false;
}
@Override
public boolean isClusteringSupported() {
return true;
}
@Override
public boolean isClusteringDeleteNeeded() {
return true;
}
@Override
public String getClusterInsertInvalidations() {
return "CALL NX_CLUSTER_INVAL(?, ?, ?)";
}
@Override
public String getClusterGetInvalidations() {
return "SELECT id, fragments, kind FROM cluster_invals WHERE nodeid = @@PSEUDO_THREAD_ID";
}
@Override
public String getClusterDeleteInvalidations() {
return "DELETE FROM cluster_invals WHERE nodeid = @@PSEUDO_THREAD_ID";
}
@Override
public boolean supportsPaging() {
return true;
}
@Override
public String getPagingClause(long limit, long offset) {
return String.format("LIMIT %d OFFSET %d", limit, offset);
}
}
| false | true | public JDBCInfo getJDBCTypeAndString(ColumnType type) {
switch (type) {
case VARCHAR:
// don't use the max 65535 because this max is actually for the
// total size of all columns of a given table, so allow several
// varchar columns in the same table
return jdbcInfo("VARCHAR(500)", Types.VARCHAR);
case CLOB:
return jdbcInfo("LONGTEXT", Types.LONGVARCHAR);
case BOOLEAN:
return jdbcInfo("BIT", Types.BIT);
case LONG:
return jdbcInfo("BIGINT", Types.BIGINT);
case DOUBLE:
return jdbcInfo("DOUBLE", Types.DOUBLE);
case TIMESTAMP:
return jdbcInfo("DATETIME", Types.TIMESTAMP);
case BLOBID:
return jdbcInfo("VARCHAR(40)", Types.VARCHAR);
// -----
case NODEID:
case NODEIDFK:
case NODEIDFKNP:
case NODEIDFKMUL:
case NODEIDFKNULL:
case NODEVAL:
return jdbcInfo("VARCHAR(36)", Types.VARCHAR);
case SYSNAME:
return jdbcInfo("VARCHAR(256) BINARY", Types.VARCHAR);
case TINYINT:
return jdbcInfo("TINYINT", Types.TINYINT);
case INTEGER:
return jdbcInfo("INTEGER", Types.INTEGER);
case FTINDEXED:
throw new AssertionError(type);
case FTSTORED:
return jdbcInfo("LONGTEXT", Types.LONGVARCHAR);
case CLUSTERNODE:
return jdbcInfo("BIGINT", Types.BIGINT);
case CLUSTERFRAGS:
return jdbcInfo("TEXT", Types.VARCHAR);
}
throw new AssertionError(type);
}
| public JDBCInfo getJDBCTypeAndString(ColumnType type) {
switch (type) {
case VARCHAR:
// don't use the max 65535 because this max is actually for the
// total size of all columns of a given table, so allow several
// varchar columns in the same table
return jdbcInfo("VARCHAR(500)", Types.VARCHAR);
case CLOB:
return jdbcInfo("LONGTEXT", Types.LONGVARCHAR);
case BOOLEAN:
return jdbcInfo("BIT", Types.BIT);
case LONG:
return jdbcInfo("BIGINT", Types.BIGINT);
case DOUBLE:
return jdbcInfo("DOUBLE", Types.DOUBLE);
case TIMESTAMP:
return jdbcInfo("DATETIME", Types.TIMESTAMP);
case BLOBID:
return jdbcInfo("VARCHAR(40) BINARY", Types.VARCHAR);
// -----
case NODEID:
case NODEIDFK:
case NODEIDFKNP:
case NODEIDFKMUL:
case NODEIDFKNULL:
case NODEVAL:
return jdbcInfo("VARCHAR(36) BINARY", Types.VARCHAR);
case SYSNAME:
return jdbcInfo("VARCHAR(256) BINARY", Types.VARCHAR);
case TINYINT:
return jdbcInfo("TINYINT", Types.TINYINT);
case INTEGER:
return jdbcInfo("INTEGER", Types.INTEGER);
case FTINDEXED:
throw new AssertionError(type);
case FTSTORED:
return jdbcInfo("LONGTEXT", Types.LONGVARCHAR);
case CLUSTERNODE:
return jdbcInfo("BIGINT", Types.BIGINT);
case CLUSTERFRAGS:
return jdbcInfo("TEXT", Types.VARCHAR);
}
throw new AssertionError(type);
}
|
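The fix above appends BINARY to the VARCHAR declarations used for node IDs and blob digests. Under MySQL's default case-insensitive (*_ci) collations, two hex/UUID-style identifiers differing only in letter case compare equal, which can corrupt lookups and unique constraints; BINARY restores byte-wise comparison. A rough Java analogy of the two comparison modes:

public class CollationAnalogy {

    public static void main(String[] args) {
        String a = "3f2a9c";
        String b = "3F2A9C";
        // default *_ci collation behaves roughly like equalsIgnoreCase:
        System.out.println(a.equalsIgnoreCase(b)); // true  -> distinct IDs collide
        // VARCHAR(36) BINARY behaves like a byte-wise equals:
        System.out.println(a.equals(b));           // false -> IDs stay distinct
    }
}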
diff --git a/src/me/ellbristow/greylistVote/greylistVote.java b/src/me/ellbristow/greylistVote/greylistVote.java
index 84d79b9..704ff53 100644
--- a/src/me/ellbristow/greylistVote/greylistVote.java
+++ b/src/me/ellbristow/greylistVote/greylistVote.java
@@ -1,683 +1,691 @@
package me.ellbristow.greylistVote;
import java.io.File;
import java.io.IOException;
import java.util.logging.Level;
import java.util.logging.Logger;
import org.bukkit.ChatColor;
import org.bukkit.OfflinePlayer;
import org.bukkit.command.Command;
import org.bukkit.command.CommandSender;
import org.bukkit.configuration.file.FileConfiguration;
import org.bukkit.configuration.file.YamlConfiguration;
import org.bukkit.entity.Player;
import org.bukkit.permissions.PermissionAttachment;
import org.bukkit.plugin.PluginDescriptionFile;
import org.bukkit.plugin.PluginManager;
import org.bukkit.plugin.java.JavaPlugin;
public class greylistVote extends JavaPlugin {
public static greylistVote plugin;
public final Logger logger = Logger.getLogger("Minecraft");
public final greyBlockListener blockListener = new greyBlockListener(this);
public final greyPlayerListener loginListener = new greyPlayerListener(this);
protected FileConfiguration config;
public FileConfiguration usersConfig = null;
private File usersFile = null;
@Override
public void onDisable() {
PluginDescriptionFile pdfFile = this.getDescription();
this.logger.info("[" + pdfFile.getName() + "] is now disabled.");
}
@Override
public void onEnable() {
PluginDescriptionFile pdfFile = this.getDescription();
this.logger.info("[" + pdfFile.getName() + "] version " + pdfFile.getVersion() + " is enabled.");
PluginManager pm = getServer().getPluginManager();
pm.registerEvents(blockListener, this);
pm.registerEvents(loginListener, this);
this.config = this.getConfig();
this.config.set("required_votes", this.config.getInt("required_votes", 2));
this.config.set("no_pvp", this.config.getBoolean("no_pvp", true));
this.saveConfig();
this.usersConfig = this.getUsersConfig();
}
public boolean onCommand(CommandSender sender, Command cmd, String commandLabel, String[] args) {
if (commandLabel.equalsIgnoreCase("glv")) {
if (args.length == 0) {
PluginDescriptionFile pdfFile = this.getDescription();
sender.sendMessage(ChatColor.GOLD + pdfFile.getName() + " version " + pdfFile.getVersion() + " by " + pdfFile.getAuthors());
sender.sendMessage(ChatColor.GOLD + "Commands: {optional} [required]");
sender.sendMessage(ChatColor.GOLD + " /glv " + ChatColor.GRAY + ": View all GreylistVote commands");
sender.sendMessage(ChatColor.GOLD + " /greylist [player] || /gl [player] || /trust Player " + ChatColor.GRAY + ":");
sender.sendMessage(ChatColor.GRAY + " Increase player's reputation");
sender.sendMessage(ChatColor.GOLD + " /griefer [player] " + ChatColor.GRAY + ":");
sender.sendMessage(ChatColor.GRAY + " Reduce player's reputation");
sender.sendMessage(ChatColor.GOLD + " /votelist {player} || /glvlist {player} " + ChatColor.GRAY + ":");
sender.sendMessage(ChatColor.GRAY + " View your (or player's) reputation");
if (sender.hasPermission("greylistvote.admin")) {
sender.sendMessage(ChatColor.RED + "Admin Commands:");
sender.sendMessage(ChatColor.GOLD + " /glv setrep [req. rep] " + ChatColor.GRAY + ": Set required reputation");
sender.sendMessage(ChatColor.GOLD + " /glv clearserver [player] " + ChatColor.GRAY + ": Remove player's Server votes");
sender.sendMessage(ChatColor.GOLD + " /glv clearall [player] " + ChatColor.GRAY + ": Remove all player's votes");
}
return true;
}
else if (args.length == 2) {
if (!sender.hasPermission("greylistvote.admin")) {
sender.sendMessage(ChatColor.RED + "You do not have permission to do that!");
return false;
}
int reqVotes = config.getInt("required_votes", 2);
if (args[0].equalsIgnoreCase("setrep")) {
try {
reqVotes = Integer.parseInt(args[1]);
}
catch(NumberFormatException nfe) {
// Failed. Number not an integer
sender.sendMessage(ChatColor.RED + "[req. votes] must be a number!" );
return false;
}
this.config.set("required_votes", reqVotes);
this.saveConfig();
sender.sendMessage(ChatColor.GOLD + "Reputation requirement now set to " + ChatColor.WHITE + args[1]);
sender.sendMessage(ChatColor.GOLD + "Player approval will be updated on next login.");
return true;
}
else if (args[0].equalsIgnoreCase("clearserver")) {
OfflinePlayer target = getServer().getOfflinePlayer(args[1]);
if (!target.hasPlayedBefore()) {
sender.sendMessage(ChatColor.RED + "Player " + ChatColor.WHITE + args[0] + ChatColor.RED + "not found!");
return true;
}
String griefList = this.usersConfig.getString(target.getName().toLowerCase() + ".griefer", null);
String voteList = this.usersConfig.getString(target.getName().toLowerCase() + ".votes", null);
String[] voteArray = null;
String[] griefArray = null;
if (griefList == null && voteList == null) {
sender.sendMessage(ChatColor.RED + "Player " + ChatColor.WHITE + target.getName() + ChatColor.RED + " does not have any votes!");
return true;
}
String newVoteList = null;
String[] newVoteArray = null;
if (voteList != null) {
voteArray = voteList.split(",");
for (String vote : voteArray) {
if (!vote.equals("Server")) {
if (newVoteList != null) {
newVoteList += "," + vote;
} else {
newVoteList = vote;
}
}
}
if (newVoteList != null) {
newVoteArray = newVoteList.split(",");
}
usersConfig.set(target.getName().toLowerCase() + ".votes", newVoteList);
}
String newGriefList = null;
String[] newGriefArray = null;
if (griefList != null) {
griefArray = griefList.split(",");
for (String vote : griefArray) {
if (!vote.equals("Server")) {
if (newGriefList != null) {
newGriefList += "," + vote;
} else {
newGriefList = vote;
}
}
}
if (newGriefList != null) {
newGriefArray = newGriefList.split(",");
}
usersConfig.set(target.getName().toLowerCase() + ".griefer", newGriefList);
}
saveUsersConfig();
int rep = 0;
if (newVoteList != null) {
voteArray = newVoteList.split(",");
rep += newVoteArray.length;
}
if (newGriefList != null) {
griefArray = newGriefList.split(",");
rep -= newGriefArray.length;
}
sender.sendMessage(ChatColor.GOLD + "'Server' votes removed from " + ChatColor.WHITE + target.getName());
if (target.isOnline()) {
target.getPlayer().sendMessage(ChatColor.GOLD + "Your Server Approval/Black-Ball votes were removed!");
if (rep >= reqVotes && !target.getPlayer().hasPermission("greylistvote.build") && !target.getPlayer().hasPermission("greylistvote.approved")) {
setApproved(target.getPlayer());
}
else if (rep < reqVotes && target.getPlayer().hasPermission("greylistvote.build") && !target.getPlayer().hasPermission("greylistvote.approved")) {
setGriefer(target.getPlayer());
}
} else {
Player[] onlinePlayers = getServer().getOnlinePlayers();
for (Player chatPlayer : onlinePlayers) {
if (chatPlayer.getName() != sender.getName()) {
chatPlayer.sendMessage(target.getName() + ChatColor.GOLD + "'s reputation has been reset to 0!");
}
}
}
this.saveUsersConfig();
return true;
}
else if (args[0].equalsIgnoreCase("clearall")) {
OfflinePlayer target = getServer().getOfflinePlayer(args[1]);
if (!target.hasPlayedBefore()) {
sender.sendMessage(ChatColor.RED + "Player " + ChatColor.WHITE + args[0] + ChatColor.RED + "not found!");
return true;
}
String griefList = this.usersConfig.getString(target.getName().toLowerCase() + ".griefer", null);
String voteList = this.usersConfig.getString(target.getName().toLowerCase() + ".votes", null);
if (griefList == null && voteList == null) {
sender.sendMessage(ChatColor.RED + "Player " + ChatColor.WHITE + target.getName() + ChatColor.RED + " does not have any votes!");
return true;
}
usersConfig.set(target.getName().toLowerCase() + ".votes", null);
usersConfig.set(target.getName().toLowerCase() + ".griefer", null);
sender.sendMessage(ChatColor.GOLD + "ALL votes removed from " + ChatColor.WHITE + target.getName());
if (target.isOnline()) {
target.getPlayer().sendMessage(ChatColor.RED + "Your reputation was reset to 0!");
if (0 >= reqVotes && !target.getPlayer().hasPermission("greylistvote.build") && !target.getPlayer().hasPermission("greylistvote.approved")) {
setApproved(target.getPlayer());
}
else if (0 < reqVotes && target.getPlayer().hasPermission("greylistvote.build") && !target.getPlayer().hasPermission("greylistvote.approved")) {
setGriefer(target.getPlayer());
}
} else {
Player[] onlinePlayers = getServer().getOnlinePlayers();
for (Player chatPlayer : onlinePlayers) {
if (chatPlayer.getName() != sender.getName()) {
chatPlayer.sendMessage(target.getName() + ChatColor.GOLD + "'s reputation has been reset to 0!");
}
}
}
this.saveUsersConfig();
return true;
}
else {
sender.sendMessage(ChatColor.RED + "Command not recognised!");
return false;
}
}
return false;
}
else if (commandLabel.equalsIgnoreCase("greylist") || commandLabel.equalsIgnoreCase("gl") || commandLabel.equalsIgnoreCase("trust")) {
if (args.length != 1) {
// No player specified or too many arguments
return false;
}
else {
if (!sender.hasPermission("greylistvote.vote")) {
sender.sendMessage(ChatColor.RED + "You do not have permission to vote!");
return true;
}
OfflinePlayer target = getServer().getOfflinePlayer(args[0]);
if (!target.hasPlayedBefore()) {
// Player not online
sender.sendMessage(args[0] + ChatColor.RED + " not found!");
return false;
}
int reqVotes = this.config.getInt("required_votes");
if (!(sender instanceof Player)) {
// Voter is the console
this.usersConfig.set(target.getName().toLowerCase() + ".votes", "Server");
this.usersConfig.set(target.getName().toLowerCase() + ".griefer", null);
sender.sendMessage(target.getName() + ChatColor.GOLD + "'s reputation was set to " + reqVotes + "!");
Player[] onlinePlayers = getServer().getOnlinePlayers();
for (Player chatPlayer : onlinePlayers) {
if (chatPlayer.getName() != target.getName()) {
chatPlayer.sendMessage(target.getName() + ChatColor.GOLD + "'s reputation was set to " + reqVotes + " by the Server!");
}
else {
target.getPlayer().sendMessage(ChatColor.GOLD + "Your reputation was set to " + reqVotes + " by the Server!");
}
}
if (target.isOnline() && !target.getPlayer().hasPermission("greylistvote.build") && !target.getPlayer().hasPermission("greylistvote.approved")) {
this.setApproved(target.getPlayer());
}
this.saveUsersConfig();
return true;
}
if (sender.getName().equalsIgnoreCase(target.getName())) {
// Player voting for self
sender.sendMessage(ChatColor.RED + "You cannot vote for yourself!");
return true;
}
String griefList = this.usersConfig.getString(target.getName().toLowerCase() + ".griefer", null);
String voteList = this.usersConfig.getString(target.getName().toLowerCase() + ".votes", null);
String[] voteArray = null;
String[] griefArray = null;
if (voteList != null) {
voteArray = voteList.split(",");
}
else {
voteList = "";
}
if (griefList != null) {
griefArray = griefList.split(",");
}
else {
griefList = "";
}
boolean found = false;
if (voteArray != null) {
for (String vote : voteArray) {
if (vote.equalsIgnoreCase(sender.getName())) {
found = true;
}
}
}
if (found) {
// Voter has already voted for this target player
sender.sendMessage(ChatColor.RED + "You have already voted for " + ChatColor.WHITE + target.getName());
return true;
}
if (griefArray != null) {
String newGriefList = null;
for (String vote : griefArray) {
if (!vote.equalsIgnoreCase(sender.getName())) {
- newGriefList += "," + vote;
+ if (newGriefList != null) {
+ newGriefList += "," + vote;
+ } else {
+ newGriefList = vote;
+ }
}
}
if (newGriefList != null) {
newGriefList = newGriefList.replaceFirst(",", "");
usersConfig.set(target.getName().toLowerCase() + ".griefer", newGriefList);
griefArray = newGriefList.split(",");
}
else {
griefArray = null;
usersConfig.set(target.getName().toLowerCase() + ".griefer", null);
}
}
sender.sendMessage(ChatColor.GOLD + "You have increased " + ChatColor.WHITE + target.getName() + ChatColor.GOLD + "'s reputation!");
Player[] onlinePlayers = getServer().getOnlinePlayers();
// Tell everyone about the reputation change
for (Player chatPlayer : onlinePlayers) {
if (chatPlayer.getName() != target.getName() && chatPlayer.getName() != sender.getName()) {
chatPlayer.sendMessage(sender.getName() + ChatColor.GOLD + " increased " + ChatColor.WHITE + target.getName() + ChatColor.GOLD + "'s reputation!");
}
else if (chatPlayer.getName() != sender.getName()) {
chatPlayer.sendMessage(sender.getName() + ChatColor.GREEN + " increased your reputation!");
chatPlayer.sendMessage(ChatColor.GOLD + "Type " + ChatColor.WHITE + "/votelist" + ChatColor.GOLD + " to check your reputation.");
}
}
if (voteList.equals("")) {
voteList = sender.getName();
}
else {
voteList = voteList + "," + sender.getName();
}
this.usersConfig.set(target.getName().toLowerCase() + ".votes", voteList);
voteArray = voteList.split(",");
int rep = 0;
if (voteArray.length != 0) {
rep += voteArray.length;
}
if (griefArray != null) {
if (griefArray.length != 0) {
rep -= griefArray.length;
}
}
if (target.isOnline() && rep >= reqVotes && !target.getPlayer().hasPermission("greylistvote.build") && !target.getPlayer().hasPermission("greylistvote.approved")) {
// Enough votes received
this.setApproved(target.getPlayer());
}
else if (!target.isOnline() && rep >= reqVotes) {
for (Player chatPlayer : onlinePlayers) {
if (chatPlayer.getName() != target.getName()) {
chatPlayer.sendMessage(target.getName() + ChatColor.GOLD + "'s reputation has reached " + reqVotes + "!");
chatPlayer.sendMessage(target.getName() + ChatColor.GOLD + " can now build!");
}
}
}
this.saveUsersConfig();
return true;
}
}
else if (commandLabel.equalsIgnoreCase("griefer")) {
if (!sender.hasPermission("greylistvote.griefer")) {
sender.sendMessage(ChatColor.RED + "You do not have permission to vote!");
return true;
}
if (args.length != 1) {
// No player specified or too many arguments
return false;
}
else {
OfflinePlayer target = getServer().getOfflinePlayer(args[0]);
if (!target.hasPlayedBefore()) {
// Player not online
sender.sendMessage(args[0] + ChatColor.RED + " not found!");
return false;
}
int reqVotes = this.config.getInt("required_votes");
String griefList = this.usersConfig.getString(target.getName().toLowerCase() + ".griefer", null);
String voteList = this.usersConfig.getString(target.getName().toLowerCase() + ".votes", null);
String[] voteArray = null;
String[] griefArray = null;
if (voteList != null) {
voteArray = voteList.split(",");
}
else {
voteList = "";
}
if (griefList != null) {
griefArray = griefList.split(",");
}
else {
griefList = "";
}
if (!(sender instanceof Player)) {
// Voter is the console
this.usersConfig.set(target.getName().toLowerCase() + ".griefer", "Server");
this.usersConfig.set(target.getName().toLowerCase() + ".votes", null);
sender.sendMessage(target.getName() + ChatColor.GOLD + "'s reputation was set to -1!");
Player[] onlinePlayers = getServer().getOnlinePlayers();
for (Player chatPlayer : onlinePlayers) {
if (chatPlayer.getName() != target.getName()) {
chatPlayer.sendMessage(target.getName() + ChatColor.GOLD + "'s reputation was set to -1 by the Server!");
}
else {
target.getPlayer().sendMessage(ChatColor.GOLD + "Your reputation was set to -1 by the Server!");
}
}
if (target.getPlayer().hasPermission("greylistvote.build") && !target.getPlayer().hasPermission("greylistvote.approved")) {
this.setGriefer(target.getPlayer());
}
this.saveUsersConfig();
return true;
}
if (sender.getName() == target.getName()) {
// Player voting for self
sender.sendMessage(ChatColor.RED + "You cannot vote for yourself!");
return true;
}
boolean found = false;
if (griefArray != null) {
for (String vote : griefArray) {
if (vote.equalsIgnoreCase(sender.getName())) {
found = true;
}
}
}
if (found) {
// Voter has already voted for this target player
sender.sendMessage(ChatColor.RED + "You have already voted for " + ChatColor.WHITE + target.getName());
return true;
}
if (voteArray != null) {
String newVoteList = null;
for (String vote : voteArray) {
if (!vote.equalsIgnoreCase(sender.getName())) {
- newVoteList += "," + vote;
+ if (newVoteList != null) {
+ newVoteList += "," + vote;
+ } else {
+ newVoteList = vote;
+ }
}
}
if (newVoteList != null) {
newVoteList = newVoteList.replaceFirst(",", "");
usersConfig.set(target.getName().toLowerCase() + ".votes", newVoteList);
voteArray = newVoteList.split(",");
}
else {
voteArray = null;
usersConfig.set(target.getName().toLowerCase() + ".votes", null);
}
}
sender.sendMessage(ChatColor.GOLD + "You have reduced " + ChatColor.WHITE + target.getName() + ChatColor.GOLD + "'s reputation!");
Player[] onlinePlayers = getServer().getOnlinePlayers();
for (Player chatPlayer : onlinePlayers) {
if (chatPlayer.getName() != target.getName() && chatPlayer.getName() != sender.getName()) {
chatPlayer.sendMessage(sender.getName() + ChatColor.GOLD + " reduced " + ChatColor.WHITE + target.getName() + ChatColor.GOLD + "'s reputation!");
}
else if (chatPlayer.getName() != sender.getName()) {
chatPlayer.sendMessage(sender.getName() + ChatColor.RED + " reduced your reputation!");
chatPlayer.sendMessage(ChatColor.GOLD + "Type " + ChatColor.WHITE + "/votelist" + ChatColor.GOLD + " to check your reputation.");
}
}
if (griefList.equals("")) {
griefList = sender.getName();
}
else {
griefList = griefList + "," + sender.getName();
}
this.usersConfig.set(target.getName().toLowerCase() + ".griefer", griefList);
griefArray = griefList.split(",");
int rep = 0;
if (voteArray != null) {
rep += voteArray.length;
}
if (griefArray != null) {
rep -= griefArray.length;
}
if (target.isOnline() && rep < reqVotes && target.getPlayer().hasPermission("greylistvote.build") && !target.getPlayer().hasPermission("greylistvote.approved")) {
// Enough votes received
this.setGriefer(target.getPlayer());
}
else if (!target.isOnline() && rep < reqVotes) {
for (Player chatPlayer : onlinePlayers) {
if (chatPlayer.getName() != target.getName()) {
chatPlayer.sendMessage(target.getName() + ChatColor.GOLD + "'s reputation has dropped below " + reqVotes + "!");
}
}
}
this.saveUsersConfig();
return true;
}
}
else if (commandLabel.equalsIgnoreCase("votelist") || commandLabel.equalsIgnoreCase("glvlist")) {
if (args.equals(null) || args.length == 0) {
String voteList = this.usersConfig.getString(sender.getName().toLowerCase() + ".votes", null);
String griefList = this.usersConfig.getString(sender.getName().toLowerCase() + ".griefer", null);
int reqVotes = config.getInt("required_votes");
if (voteList == null && griefList == null) {
sender.sendMessage(ChatColor.GOLD + "You have not received any votes.");
sender.sendMessage(ChatColor.GOLD + "Current Reputation: " + ChatColor.WHITE + "0");
sender.sendMessage(ChatColor.GOLD + "Required Reputation: " + ChatColor.WHITE + reqVotes);
}
else {
sender.sendMessage(ChatColor.GOLD + "You have received votes from:");
int reputation = 0;
boolean serverVote = false;
String[] voteArray = null;
String[] griefArray = null;
if (voteList != null) {
voteArray = voteList.split(",");
if (voteArray.length != 0) {
String votes = ChatColor.GREEN + " Approvals: " + ChatColor.GOLD;
for (String vote : voteArray) {
votes = votes + vote + " ";
if (vote.equals("Server")) {
serverVote = true;
}
reputation ++;
}
if (serverVote) {
reputation = reqVotes;
}
sender.sendMessage(votes);
}
}
if (griefList != null) {
griefArray = griefList.split(",");
if (griefArray.length != 0) {
String votes = ChatColor.DARK_GRAY + " Black-Balls: " + ChatColor.GOLD;
serverVote = false;
for (String vote : griefArray) {
votes = votes + vote + " ";
if (vote.equals("Server")) {
serverVote = true;
}
reputation--;
}
if (serverVote) {
reputation = -1;
}
sender.sendMessage(votes);
}
}
String repText = "";
if (reputation >= reqVotes) {
repText = " " + ChatColor.GREEN + reputation;
}
else {
repText = " " + ChatColor.RED + reputation;
}
sender.sendMessage(ChatColor.GOLD + "Current Reputation:" + repText);
sender.sendMessage(ChatColor.GOLD + "Required Reputation: " + ChatColor.WHITE + reqVotes);
}
return true;
}
else {
OfflinePlayer checktarget = getServer().getOfflinePlayer(args[0]);
String DN = null;
String target = null;
if (checktarget.isOnline()) {
target = checktarget.getPlayer().getName();
DN = checktarget.getPlayer().getDisplayName();
}
else {
if (checktarget != null) {
target = checktarget.getName();
DN = checktarget.getName();
}
}
if (target == null) {
// Player not found
sender.sendMessage(args[0] + ChatColor.RED + " not found!");
return false;
}
String voteList = this.usersConfig.getString(target.toLowerCase() + ".votes", null);
String griefList = this.usersConfig.getString(target.toLowerCase() + ".griefer", null);
int reqVotes = config.getInt("required_votes");
if (voteList == null && griefList == null) {
sender.sendMessage(DN + ChatColor.GOLD + " has not received any votes.");
sender.sendMessage(ChatColor.GOLD + "Current Reputation: " + ChatColor.WHITE + "0");
sender.sendMessage(ChatColor.GOLD + "Required Reputation: " + ChatColor.WHITE + reqVotes);
}
else {
sender.sendMessage(DN + ChatColor.GOLD + " has received votes from:");
int reputation = 0;
boolean serverVote = false;
String[] voteArray = null;
String[] griefArray = null;
if (voteList != null) {
voteArray = voteList.split(",");
if (voteArray.length != 0) {
String votes = ChatColor.GREEN + " Approvals: " + ChatColor.GOLD;
for (String vote : voteArray) {
votes = votes + vote + " ";
if (vote.equals("Server")) {
serverVote = true;
}
reputation ++;
}
if (serverVote) {
reputation = reqVotes;
}
sender.sendMessage(votes);
}
}
if (griefList != null) {
griefArray = griefList.split(",");
if (griefArray.length != 0) {
String votes = ChatColor.DARK_GRAY + " Black-Balls: " + ChatColor.GOLD;
serverVote = false;
for (String vote : griefArray) {
votes = votes + vote + " ";
if (vote.equals("Server")) {
serverVote = true;
}
reputation--;
}
if (serverVote) {
reputation = -1;
}
sender.sendMessage(votes);
}
}
String repText = "";
if (reputation >= reqVotes) {
repText = " " + ChatColor.GREEN + reputation;
}
else {
repText = " " + ChatColor.RED + reputation;
}
sender.sendMessage(ChatColor.GOLD + "Current Reputation:" + repText);
sender.sendMessage(ChatColor.GOLD + "Required Reputation: " + ChatColor.WHITE + reqVotes);
}
return true;
}
}
return false;
}
public void setApproved(Player target) {
PermissionAttachment attachment = target.addAttachment(this);
attachment.setPermission("greylistvote.build", true);
Player[] onlinePlayers = getServer().getOnlinePlayers();
int reqVotes = config.getInt("required_votes");
for (Player chatPlayer : onlinePlayers) {
if (chatPlayer.getName() != target.getName()) {
chatPlayer.sendMessage(target.getName() + ChatColor.GOLD + "'s reputation has reached " + reqVotes + "!");
chatPlayer.sendMessage(target.getName() + ChatColor.GOLD + " can now build!");
}
else {
chatPlayer.sendMessage(ChatColor.GREEN + "Your reputation has reached " + reqVotes + "!");
chatPlayer.sendMessage(ChatColor.GREEN + "You can now build!");
}
}
}
public void setGriefer(Player target) {
PermissionAttachment attachment = target.addAttachment(this);
attachment.setPermission("greylistvote.build", false);
Player[] onlinePlayers = getServer().getOnlinePlayers();
int reqVotes = config.getInt("required_votes");
for (Player chatPlayer : onlinePlayers) {
if (chatPlayer.getName() != target.getName()) {
chatPlayer.sendMessage(target.getName() + ChatColor.GOLD + "'s reputation has dropped below " + reqVotes + "!");
chatPlayer.sendMessage(target.getName() + ChatColor.GOLD + " can no longer build!");
}
else {
chatPlayer.sendMessage(ChatColor.RED + "Your reputation has dropped below " + reqVotes + "!");
chatPlayer.sendMessage(ChatColor.RED + "Your build rights have been revoked!");
}
}
}
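/** Loads (or reloads) users.yml from the plugin's data folder into memory. */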
public void loadUsersConfig() {
if (this.usersFile == null) {
this.usersFile = new File(getDataFolder(),"users.yml");
}
this.usersConfig = YamlConfiguration.loadConfiguration(this.usersFile);
}
public FileConfiguration getUsersConfig() {
if (this.usersConfig == null) {
this.loadUsersConfig();
}
return this.usersConfig;
}
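/** Writes the in-memory users configuration back to users.yml, logging any I/O failure. */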
public void saveUsersConfig() {
if (this.usersConfig == null || this.usersFile == null) {
return;
}
try {
this.usersConfig.save(this.usersFile);
} catch (IOException ex) {
this.logger.log(Level.SEVERE, "Could not save " + this.usersFile, ex);
}
}
}
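For reference, the reputation arithmetic implemented by the command handlers above reduces to a few rules: each name in the comma-separated votes list adds one point, each name in the griefer list subtracts one, a "Server" entry in the votes list pins the total to the required reputation before black-balls are applied, and a "Server" black-ball forces the total to -1. The standalone sketch below restates those rules for clarity; the class and method names are illustrative and not part of the plugin.

public class ReputationSketch {
    // Vote lists are stored as comma-separated names, e.g. "Alice,Bob,Server".
    static int reputation(String voteList, String griefList, int reqVotes) {
        int rep = 0;
        boolean serverApproval = false;
        boolean serverBlackBall = false;
        if (voteList != null && !voteList.isEmpty()) {
            for (String vote : voteList.split(",")) {
                if (vote.equals("Server")) serverApproval = true;
                rep++; // one point per approval
            }
        }
        if (serverApproval) rep = reqVotes; // a Server approval pins rep at the threshold
        if (griefList != null && !griefList.isEmpty()) {
            for (String vote : griefList.split(",")) {
                if (vote.equals("Server")) serverBlackBall = true;
                rep--; // one point off per black-ball
            }
        }
        if (serverBlackBall) rep = -1; // a Server black-ball forces a failing score
        return rep;
    }
    public static void main(String[] args) {
        System.out.println(reputation("Alice,Server", null, 2));  // prints 2
        System.out.println(reputation("Alice,Bob", "Server", 2)); // prints -1
    }
}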
| false | true | public boolean onCommand(CommandSender sender, Command cmd, String commandLabel, String[] args) {
if (commandLabel.equalsIgnoreCase("glv")) {
if (args.length == 0) {
PluginDescriptionFile pdfFile = this.getDescription();
sender.sendMessage(ChatColor.GOLD + pdfFile.getName() + " version " + pdfFile.getVersion() + " by " + pdfFile.getAuthors());
sender.sendMessage(ChatColor.GOLD + "Commands: {optional} [required]");
sender.sendMessage(ChatColor.GOLD + " /glv " + ChatColor.GRAY + ": View all GreylistVote commands");
sender.sendMessage(ChatColor.GOLD + " /greylist [player] || /gl [player] || /trust Player " + ChatColor.GRAY + ":");
sender.sendMessage(ChatColor.GRAY + " Increase player's reputation");
sender.sendMessage(ChatColor.GOLD + " /griefer [player] " + ChatColor.GRAY + ":");
sender.sendMessage(ChatColor.GRAY + " Reduce player's reputation");
sender.sendMessage(ChatColor.GOLD + " /votelist {player} || /glvlist {player} " + ChatColor.GRAY + ":");
sender.sendMessage(ChatColor.GRAY + " View your (or player's) reputation");
if (sender.hasPermission("greylistvote.admin")) {
sender.sendMessage(ChatColor.RED + "Admin Commands:");
sender.sendMessage(ChatColor.GOLD + " /glv setrep [req. rep] " + ChatColor.GRAY + ": Set required reputation");
sender.sendMessage(ChatColor.GOLD + " /glv clearserver [player] " + ChatColor.GRAY + ": Remove player's Server votes");
sender.sendMessage(ChatColor.GOLD + " /glv clearall [player] " + ChatColor.GRAY + ": Remove all player's votes");
}
return true;
}
else if (args.length == 2) {
if (!sender.hasPermission("greylistvote.admin")) {
sender.sendMessage(ChatColor.RED + "You do not have permission to do that!");
return false;
}
int reqVotes = config.getInt("required_votes", 2);
if (args[0].equalsIgnoreCase("setrep")) {
try {
reqVotes = Integer.parseInt(args[1]);
}
catch(NumberFormatException nfe) {
// Failed. Number not an integer
sender.sendMessage(ChatColor.RED + "[req. votes] must be a number!" );
return false;
}
this.config.set("required_votes", reqVotes);
this.saveConfig();
sender.sendMessage(ChatColor.GOLD + "Reputation requirement now set to " + ChatColor.WHITE + args[1]);
sender.sendMessage(ChatColor.GOLD + "Player approval will be updated on next login.");
return true;
}
else if (args[0].equalsIgnoreCase("clearserver")) {
OfflinePlayer target = getServer().getOfflinePlayer(args[1]);
if (!target.hasPlayedBefore()) {
sender.sendMessage(ChatColor.RED + "Player " + ChatColor.WHITE + args[0] + ChatColor.RED + "not found!");
return true;
}
String griefList = this.usersConfig.getString(target.getName().toLowerCase() + ".griefer", null);
String voteList = this.usersConfig.getString(target.getName().toLowerCase() + ".votes", null);
String[] voteArray = null;
String[] griefArray = null;
if (griefList == null && voteList == null) {
sender.sendMessage(ChatColor.RED + "Player " + ChatColor.WHITE + target.getName() + ChatColor.RED + " does not have any votes!");
return true;
}
String newVoteList = null;
String[] newVoteArray = null;
if (voteList != null) {
voteArray = voteList.split(",");
for (String vote : voteArray) {
if (!vote.equals("Server")) {
if (newVoteList != null) {
newVoteList += "," + vote;
} else {
newVoteList = vote;
}
}
}
if (newVoteList != null) {
newVoteArray = newVoteList.split(",");
}
usersConfig.set(target.getName().toLowerCase() + ".votes", newVoteList);
}
String newGriefList = null;
String[] newGriefArray = null;
if (griefList != null) {
griefArray = griefList.split(",");
for (String vote : griefArray) {
if (!vote.equals("Server")) {
if (newGriefList != null) {
newGriefList += "," + vote;
} else {
newGriefList = vote;
}
}
}
if (newGriefList != null) {
newGriefArray = newGriefList.split(",");
}
usersConfig.set(target.getName().toLowerCase() + ".griefer", newGriefList);
}
saveUsersConfig();
int rep = 0;
if (newVoteList != null) {
voteArray = newVoteList.split(",");
rep += newVoteArray.length;
}
if (newGriefList != null) {
griefArray = newGriefList.split(",");
rep -= newGriefArray.length;
}
sender.sendMessage(ChatColor.GOLD + "'Server' votes removed from " + ChatColor.WHITE + target.getName());
if (target.isOnline()) {
target.getPlayer().sendMessage(ChatColor.GOLD + "Your Server Approval/Black-Ball votes were removed!");
if (rep >= reqVotes && !target.getPlayer().hasPermission("greylistvote.build") && !target.getPlayer().hasPermission("greylistvote.approved")) {
setApproved(target.getPlayer());
}
else if (rep < reqVotes && target.getPlayer().hasPermission("greylistvote.build") && !target.getPlayer().hasPermission("greylistvote.approved")) {
setGriefer(target.getPlayer());
}
} else {
Player[] onlinePlayers = getServer().getOnlinePlayers();
for (Player chatPlayer : onlinePlayers) {
if (!chatPlayer.getName().equals(sender.getName())) {
chatPlayer.sendMessage(target.getName() + ChatColor.GOLD + "'s reputation has been reset to 0!");
}
}
}
this.saveUsersConfig();
return true;
}
else if (args[0].equalsIgnoreCase("clearall")) {
OfflinePlayer target = getServer().getOfflinePlayer(args[1]);
if (!target.hasPlayedBefore()) {
sender.sendMessage(ChatColor.RED + "Player " + ChatColor.WHITE + args[0] + ChatColor.RED + "not found!");
return true;
}
String griefList = this.usersConfig.getString(target.getName().toLowerCase() + ".griefer", null);
String voteList = this.usersConfig.getString(target.getName().toLowerCase() + ".votes", null);
if (griefList == null && voteList == null) {
sender.sendMessage(ChatColor.RED + "Player " + ChatColor.WHITE + target.getName() + ChatColor.RED + " does not have any votes!");
return true;
}
usersConfig.set(target.getName().toLowerCase() + ".votes", null);
usersConfig.set(target.getName().toLowerCase() + ".griefer", null);
sender.sendMessage(ChatColor.GOLD + "ALL votes removed from " + ChatColor.WHITE + target.getName());
if (target.isOnline()) {
target.getPlayer().sendMessage(ChatColor.RED + "Your reputation was reset to 0!");
if (0 >= reqVotes && !target.getPlayer().hasPermission("greylistvote.build") && !target.getPlayer().hasPermission("greylistvote.approved")) {
setApproved(target.getPlayer());
}
else if (0 < reqVotes && target.getPlayer().hasPermission("greylistvote.build") && !target.getPlayer().hasPermission("greylistvote.approved")) {
setGriefer(target.getPlayer());
}
} else {
Player[] onlinePlayers = getServer().getOnlinePlayers();
for (Player chatPlayer : onlinePlayers) {
if (!chatPlayer.getName().equals(sender.getName())) {
chatPlayer.sendMessage(target.getName() + ChatColor.GOLD + "'s reputation has been reset to 0!");
}
}
}
this.saveUsersConfig();
return true;
}
else {
sender.sendMessage(ChatColor.RED + "Command not recognised!");
return false;
}
}
return false;
}
else if (commandLabel.equalsIgnoreCase("greylist") || commandLabel.equalsIgnoreCase("gl") || commandLabel.equalsIgnoreCase("trust")) {
if (args.length != 1) {
// No player specified or too many arguments
return false;
}
else {
if (!sender.hasPermission("greylistvote.vote")) {
sender.sendMessage(ChatColor.RED + "You do not have permission to vote!");
return true;
}
OfflinePlayer target = getServer().getOfflinePlayer(args[0]);
if (!target.hasPlayedBefore()) {
// Player has never been seen on this server
sender.sendMessage(args[0] + ChatColor.RED + " not found!");
return false;
}
int reqVotes = this.config.getInt("required_votes");
if (!(sender instanceof Player)) {
// Voter is the console
this.usersConfig.set(target.getName().toLowerCase() + ".votes", "Server");
this.usersConfig.set(target.getName().toLowerCase() + ".griefer", null);
sender.sendMessage(target.getName() + ChatColor.GOLD + "'s reputation was set to " + reqVotes + "!");
Player[] onlinePlayers = getServer().getOnlinePlayers();
for (Player chatPlayer : onlinePlayers) {
if (!chatPlayer.getName().equals(target.getName())) {
chatPlayer.sendMessage(target.getName() + ChatColor.GOLD + "'s reputation was set to " + reqVotes + " by the Server!");
}
else {
target.getPlayer().sendMessage(ChatColor.GOLD + "Your reputation was set to " + reqVotes + " by the Server!");
}
}
if (target.isOnline() && !target.getPlayer().hasPermission("greylistvote.build") && !target.getPlayer().hasPermission("greylistvote.approved")) {
this.setApproved(target.getPlayer());
}
this.saveUsersConfig();
return true;
}
if (sender.getName().equalsIgnoreCase(target.getName())) {
// Player voting for self
sender.sendMessage(ChatColor.RED + "You cannot vote for yourself!");
return true;
}
String griefList = this.usersConfig.getString(target.getName().toLowerCase() + ".griefer", null);
String voteList = this.usersConfig.getString(target.getName().toLowerCase() + ".votes", null);
String[] voteArray = null;
String[] griefArray = null;
if (voteList != null) {
voteArray = voteList.split(",");
}
else {
voteList = "";
}
if (griefList != null) {
griefArray = griefList.split(",");
}
else {
griefList = "";
}
boolean found = false;
if (voteArray != null) {
for (String vote : voteArray) {
if (vote.equalsIgnoreCase(sender.getName())) {
found = true;
}
}
}
if (found) {
// Voter has already voted for this target player
sender.sendMessage(ChatColor.RED + "You have already voted for " + ChatColor.WHITE + target.getName());
return true;
}
if (griefArray != null) {
String newGriefList = null;
for (String vote : griefArray) {
if (!vote.equalsIgnoreCase(sender.getName())) {
newGriefList += "," + vote;
}
}
if (newGriefList != null) {
newGriefList = newGriefList.replaceFirst(",", "");
usersConfig.set(target.getName().toLowerCase() + ".griefer", newGriefList);
griefArray = newGriefList.split(",");
}
else {
griefArray = null;
usersConfig.set(target.getName().toLowerCase() + ".griefer", null);
}
}
sender.sendMessage(ChatColor.GOLD + "You have increased " + ChatColor.WHITE + target.getName() + ChatColor.GOLD + "'s reputation!");
Player[] onlinePlayers = getServer().getOnlinePlayers();
// Tell everyone about the reputation change
for (Player chatPlayer : onlinePlayers) {
if (!chatPlayer.getName().equals(target.getName()) && !chatPlayer.getName().equals(sender.getName())) {
chatPlayer.sendMessage(sender.getName() + ChatColor.GOLD + " increased " + ChatColor.WHITE + target.getName() + ChatColor.GOLD + "'s reputation!");
}
else if (!chatPlayer.getName().equals(sender.getName())) {
chatPlayer.sendMessage(sender.getName() + ChatColor.GREEN + " increased your reputation!");
chatPlayer.sendMessage(ChatColor.GOLD + "Type " + ChatColor.WHITE + "/votelist" + ChatColor.GOLD + " to check your reputation.");
}
}
if (voteList.equals("")) {
voteList = sender.getName();
}
else {
voteList = voteList + "," + sender.getName();
}
this.usersConfig.set(target.getName().toLowerCase() + ".votes", voteList);
voteArray = voteList.split(",");
int rep = 0;
if (voteArray.length != 0) {
rep += voteArray.length;
}
if (griefArray != null) {
if (griefArray.length != 0) {
rep -= griefArray.length;
}
}
if (target.isOnline() && rep >= reqVotes && !target.getPlayer().hasPermission("greylistvote.build") && !target.getPlayer().hasPermission("greylistvote.approved")) {
// Enough votes received
this.setApproved(target.getPlayer());
}
else if (!target.isOnline() && rep >= reqVotes) {
for (Player chatPlayer : onlinePlayers) {
if (!chatPlayer.getName().equals(target.getName())) {
chatPlayer.sendMessage(target.getName() + ChatColor.GOLD + "'s reputation has reached " + reqVotes + "!");
chatPlayer.sendMessage(target.getName() + ChatColor.GOLD + " can now build!");
}
}
}
this.saveUsersConfig();
return true;
}
}
else if (commandLabel.equalsIgnoreCase("griefer")) {
if (!sender.hasPermission("greylistvote.griefer")) {
sender.sendMessage(ChatColor.RED + "You do not have permission to vote!");
return true;
}
if (args.length != 1) {
// No player specified or too many arguments
return false;
}
else {
OfflinePlayer target = getServer().getOfflinePlayer(args[0]);
if (!target.hasPlayedBefore()) {
// Player has never been seen on this server
sender.sendMessage(args[0] + ChatColor.RED + " not found!");
return false;
}
int reqVotes = this.config.getInt("required_votes");
String griefList = this.usersConfig.getString(target.getName().toLowerCase() + ".griefer", null);
String voteList = this.usersConfig.getString(target.getName().toLowerCase() + ".votes", null);
String[] voteArray = null;
String[] griefArray = null;
if (voteList != null) {
voteArray = voteList.split(",");
}
else {
voteList = "";
}
if (griefList != null) {
griefArray = griefList.split(",");
}
else {
griefList = "";
}
if (!(sender instanceof Player)) {
// Voter is the console
this.usersConfig.set(target.getName().toLowerCase() + ".griefer", "Server");
this.usersConfig.set(target.getName().toLowerCase() + ".votes", null);
sender.sendMessage(target.getName() + ChatColor.GOLD + "'s reputation was set to -1!");
Player[] onlinePlayers = getServer().getOnlinePlayers();
for (Player chatPlayer : onlinePlayers) {
if (!chatPlayer.getName().equals(target.getName())) {
chatPlayer.sendMessage(target.getName() + ChatColor.GOLD + "'s reputation was set to -1 by the Server!");
}
else {
target.getPlayer().sendMessage(ChatColor.GOLD + "Your reputation was set to -1 by the Server!");
}
}
if (target.isOnline() && target.getPlayer().hasPermission("greylistvote.build") && !target.getPlayer().hasPermission("greylistvote.approved")) {
this.setGriefer(target.getPlayer());
}
this.saveUsersConfig();
return true;
}
if (sender.getName().equalsIgnoreCase(target.getName())) {
// Player voting for self
sender.sendMessage(ChatColor.RED + "You cannot vote for yourself!");
return true;
}
boolean found = false;
if (griefArray != null) {
for (String vote : griefArray) {
if (vote.equalsIgnoreCase(sender.getName())) {
found = true;
}
}
}
if (found) {
// Voter has already voted for this target player
sender.sendMessage(ChatColor.RED + "You have already voted for " + ChatColor.WHITE + target.getName());
return true;
}
if (voteArray != null) {
String newVoteList = null;
for (String vote : voteArray) {
if (!vote.equalsIgnoreCase(sender.getName())) {
newVoteList += "," + vote;
}
}
if (newVoteList != null) {
newVoteList = newVoteList.replaceFirst(",", "");
usersConfig.set(target.getName().toLowerCase() + ".votes", newVoteList);
voteArray = newVoteList.split(",");
}
else {
voteArray = null;
usersConfig.set(target.getName().toLowerCase() + ".votes", null);
}
}
sender.sendMessage(ChatColor.GOLD + "You have reduced " + ChatColor.WHITE + target.getName() + ChatColor.GOLD + "'s reputation!");
Player[] onlinePlayers = getServer().getOnlinePlayers();
for (Player chatPlayer : onlinePlayers) {
if (!chatPlayer.getName().equals(target.getName()) && !chatPlayer.getName().equals(sender.getName())) {
chatPlayer.sendMessage(sender.getName() + ChatColor.GOLD + " reduced " + ChatColor.WHITE + target.getName() + ChatColor.GOLD + "'s reputation!");
}
else if (!chatPlayer.getName().equals(sender.getName())) {
chatPlayer.sendMessage(sender.getName() + ChatColor.RED + " reduced your reputation!");
chatPlayer.sendMessage(ChatColor.GOLD + "Type " + ChatColor.WHITE + "/votelist" + ChatColor.GOLD + " to check your reputation.");
}
}
if (griefList.equals("")) {
griefList = sender.getName();
}
else {
griefList = griefList + "," + sender.getName();
}
this.usersConfig.set(target.getName().toLowerCase() + ".griefer", griefList);
griefArray = griefList.split(",");
int rep = 0;
if (voteArray != null) {
rep += voteArray.length;
}
if (griefArray != null) {
rep -= griefArray.length;
}
if (target.isOnline() && rep < reqVotes && target.getPlayer().hasPermission("greylistvote.build") && !target.getPlayer().hasPermission("greylistvote.approved")) {
// Reputation fell below the threshold
this.setGriefer(target.getPlayer());
}
else if (!target.isOnline() && rep < reqVotes) {
for (Player chatPlayer : onlinePlayers) {
if (!chatPlayer.getName().equals(target.getName())) {
chatPlayer.sendMessage(target.getName() + ChatColor.GOLD + "'s reputation has dropped below " + reqVotes + "!");
}
}
}
this.saveUsersConfig();
return true;
}
}
else if (commandLabel.equalsIgnoreCase("votelist") || commandLabel.equalsIgnoreCase("glvlist")) {
if (args == null || args.length == 0) {
String voteList = this.usersConfig.getString(sender.getName().toLowerCase() + ".votes", null);
String griefList = this.usersConfig.getString(sender.getName().toLowerCase() + ".griefer", null);
int reqVotes = config.getInt("required_votes");
if (voteList == null && griefList == null) {
sender.sendMessage(ChatColor.GOLD + "You have not received any votes.");
sender.sendMessage(ChatColor.GOLD + "Current Reputation: " + ChatColor.WHITE + "0");
sender.sendMessage(ChatColor.GOLD + "Required Reputation: " + ChatColor.WHITE + reqVotes);
}
else {
sender.sendMessage(ChatColor.GOLD + "You have received votes from:");
int reputation = 0;
boolean serverVote = false;
String[] voteArray = null;
String[] griefArray = null;
if (voteList != null) {
voteArray = voteList.split(",");
if (voteArray.length != 0) {
String votes = ChatColor.GREEN + " Approvals: " + ChatColor.GOLD;
for (String vote : voteArray) {
votes = votes + vote + " ";
if (vote.equals("Server")) {
serverVote = true;
}
reputation ++;
}
if (serverVote) {
reputation = reqVotes;
}
sender.sendMessage(votes);
}
}
if (griefList != null) {
griefArray = griefList.split(",");
if (griefArray.length != 0) {
String votes = ChatColor.DARK_GRAY + " Black-Balls: " + ChatColor.GOLD;
serverVote = false;
for (String vote : griefArray) {
votes = votes + vote + " ";
if (vote.equals("Server")) {
serverVote = true;
}
reputation--;
}
if (serverVote) {
reputation = -1;
}
sender.sendMessage(votes);
}
}
String repText = "";
if (reputation >= reqVotes) {
repText = " " + ChatColor.GREEN + reputation;
}
else {
repText = " " + ChatColor.RED + reputation;
}
sender.sendMessage(ChatColor.GOLD + "Current Reputation:" + repText);
sender.sendMessage(ChatColor.GOLD + "Required Reputation: " + ChatColor.WHITE + reqVotes);
}
return true;
}
else {
OfflinePlayer checktarget = getServer().getOfflinePlayer(args[0]);
String DN = null;
String target = null;
if (checktarget.isOnline()) {
target = checktarget.getPlayer().getName();
DN = checktarget.getPlayer().getDisplayName();
}
else {
if (checktarget != null) {
target = checktarget.getName();
DN = checktarget.getName();
}
}
if (target == null) {
// Player not found
sender.sendMessage(args[0] + ChatColor.RED + " not found!");
return false;
}
String voteList = this.usersConfig.getString(target.toLowerCase() + ".votes", null);
String griefList = this.usersConfig.getString(target.toLowerCase() + ".griefer", null);
int reqVotes = config.getInt("required_votes");
if (voteList == null && griefList == null) {
sender.sendMessage(DN + ChatColor.GOLD + " has not received any votes.");
sender.sendMessage(ChatColor.GOLD + "Current Reputation: " + ChatColor.WHITE + "0");
sender.sendMessage(ChatColor.GOLD + "Required Reputation: " + ChatColor.WHITE + reqVotes);
}
else {
sender.sendMessage(DN + ChatColor.GOLD + " has received votes from:");
int reputation = 0;
boolean serverVote = false;
String[] voteArray = null;
String[] griefArray = null;
if (voteList != null) {
voteArray = voteList.split(",");
if (voteArray.length != 0) {
String votes = ChatColor.GREEN + " Approvals: " + ChatColor.GOLD;
for (String vote : voteArray) {
votes = votes + vote + " ";
if (vote.equals("Server")) {
serverVote = true;
}
reputation ++;
}
if (serverVote) {
reputation = reqVotes;
}
sender.sendMessage(votes);
}
}
if (griefList != null) {
griefArray = griefList.split(",");
if (griefArray.length != 0) {
String votes = ChatColor.DARK_GRAY + " Black-Balls: " + ChatColor.GOLD;
serverVote = false;
for (String vote : griefArray) {
votes = votes + vote + " ";
if (vote.equals("Server")) {
serverVote = true;
}
reputation--;
}
if (serverVote) {
reputation = -1;
}
sender.sendMessage(votes);
}
}
String repText = "";
if (reputation >= reqVotes) {
repText = " " + ChatColor.GREEN + reputation;
}
else {
repText = " " + ChatColor.RED + reputation;
}
sender.sendMessage(ChatColor.GOLD + "Current Reputation:" + repText);
sender.sendMessage(ChatColor.GOLD + "Required Reputation: " + ChatColor.WHITE + reqVotes);
}
return true;
}
}
return false;
}
| public boolean onCommand(CommandSender sender, Command cmd, String commandLabel, String[] args) {
if (commandLabel.equalsIgnoreCase("glv")) {
if (args.length == 0) {
PluginDescriptionFile pdfFile = this.getDescription();
sender.sendMessage(ChatColor.GOLD + pdfFile.getName() + " version " + pdfFile.getVersion() + " by " + pdfFile.getAuthors());
sender.sendMessage(ChatColor.GOLD + "Commands: {optional} [required]");
sender.sendMessage(ChatColor.GOLD + " /glv " + ChatColor.GRAY + ": View all GreylistVote commands");
sender.sendMessage(ChatColor.GOLD + " /greylist [player] || /gl [player] || /trust Player " + ChatColor.GRAY + ":");
sender.sendMessage(ChatColor.GRAY + " Increase player's reputation");
sender.sendMessage(ChatColor.GOLD + " /griefer [player] " + ChatColor.GRAY + ":");
sender.sendMessage(ChatColor.GRAY + " Reduce player's reputation");
sender.sendMessage(ChatColor.GOLD + " /votelist {player} || /glvlist {player} " + ChatColor.GRAY + ":");
sender.sendMessage(ChatColor.GRAY + " View your (or player's) reputation");
if (sender.hasPermission("greylistvote.admin")) {
sender.sendMessage(ChatColor.RED + "Admin Commands:");
sender.sendMessage(ChatColor.GOLD + " /glv setrep [req. rep] " + ChatColor.GRAY + ": Set required reputation");
sender.sendMessage(ChatColor.GOLD + " /glv clearserver [player] " + ChatColor.GRAY + ": Remove player's Server votes");
sender.sendMessage(ChatColor.GOLD + " /glv clearall [player] " + ChatColor.GRAY + ": Remove all player's votes");
}
return true;
}
else if (args.length == 2) {
if (!sender.hasPermission("greylistvote.admin")) {
sender.sendMessage(ChatColor.RED + "You do not have permission to do that!");
return false;
}
int reqVotes = config.getInt("required_votes", 2);
if (args[0].equalsIgnoreCase("setrep")) {
try {
reqVotes = Integer.parseInt(args[1]);
}
catch(NumberFormatException nfe) {
// Failed. Number not an integer
sender.sendMessage(ChatColor.RED + "[req. votes] must be a number!" );
return false;
}
this.config.set("required_votes", reqVotes);
this.saveConfig();
sender.sendMessage(ChatColor.GOLD + "Reputation requirement now set to " + ChatColor.WHITE + args[1]);
sender.sendMessage(ChatColor.GOLD + "Player approval will be updated on next login.");
return true;
}
else if (args[0].equalsIgnoreCase("clearserver")) {
OfflinePlayer target = getServer().getOfflinePlayer(args[1]);
if (!target.hasPlayedBefore()) {
sender.sendMessage(ChatColor.RED + "Player " + ChatColor.WHITE + args[0] + ChatColor.RED + "not found!");
return true;
}
String griefList = this.usersConfig.getString(target.getName().toLowerCase() + ".griefer", null);
String voteList = this.usersConfig.getString(target.getName().toLowerCase() + ".votes", null);
String[] voteArray = null;
String[] griefArray = null;
if (griefList == null && voteList == null) {
sender.sendMessage(ChatColor.RED + "Player " + ChatColor.WHITE + target.getName() + ChatColor.RED + " does not have any votes!");
return true;
}
String newVoteList = null;
String[] newVoteArray = null;
if (voteList != null) {
voteArray = voteList.split(",");
for (String vote : voteArray) {
if (!vote.equals("Server")) {
if (newVoteList != null) {
newVoteList += "," + vote;
} else {
newVoteList = vote;
}
}
}
if (newVoteList != null) {
newVoteArray = newVoteList.split(",");
}
usersConfig.set(target.getName().toLowerCase() + ".votes", newVoteList);
}
String newGriefList = null;
String[] newGriefArray = null;
if (griefList != null) {
griefArray = griefList.split(",");
for (String vote : griefArray) {
if (!vote.equals("Server")) {
if (newGriefList != null) {
newGriefList += "," + vote;
} else {
newGriefList = vote;
}
}
}
if (newGriefList != null) {
newGriefArray = newGriefList.split(",");
}
usersConfig.set(target.getName().toLowerCase() + ".griefer", newGriefList);
}
saveUsersConfig();
int rep = 0;
if (newVoteList != null) {
voteArray = newVoteList.split(",");
rep += newVoteArray.length;
}
if (newGriefList != null) {
griefArray = newGriefList.split(",");
rep -= newGriefArray.length;
}
sender.sendMessage(ChatColor.GOLD + "'Server' votes removed from " + ChatColor.WHITE + target.getName());
if (target.isOnline()) {
target.getPlayer().sendMessage(ChatColor.GOLD + "Your Server Approval/Black-Ball votes were removed!");
if (rep >= reqVotes && !target.getPlayer().hasPermission("greylistvote.build") && !target.getPlayer().hasPermission("greylistvote.approved")) {
setApproved(target.getPlayer());
}
else if (rep < reqVotes && target.getPlayer().hasPermission("greylistvote.build") && !target.getPlayer().hasPermission("greylistvote.approved")) {
setGriefer(target.getPlayer());
}
} else {
Player[] onlinePlayers = getServer().getOnlinePlayers();
for (Player chatPlayer : onlinePlayers) {
if (!chatPlayer.getName().equals(sender.getName())) {
chatPlayer.sendMessage(target.getName() + ChatColor.GOLD + "'s reputation has been reset to 0!");
}
}
}
this.saveUsersConfig();
return true;
}
else if (args[0].equalsIgnoreCase("clearall")) {
OfflinePlayer target = getServer().getOfflinePlayer(args[1]);
if (!target.hasPlayedBefore()) {
sender.sendMessage(ChatColor.RED + "Player " + ChatColor.WHITE + args[0] + ChatColor.RED + "not found!");
return true;
}
String griefList = this.usersConfig.getString(target.getName().toLowerCase() + ".griefer", null);
String voteList = this.usersConfig.getString(target.getName().toLowerCase() + ".votes", null);
if (griefList == null && voteList == null) {
sender.sendMessage(ChatColor.RED + "Player " + ChatColor.WHITE + target.getName() + ChatColor.RED + " does not have any votes!");
return true;
}
usersConfig.set(target.getName().toLowerCase() + ".votes", null);
usersConfig.set(target.getName().toLowerCase() + ".griefer", null);
sender.sendMessage(ChatColor.GOLD + "ALL votes removed from " + ChatColor.WHITE + target.getName());
if (target.isOnline()) {
target.getPlayer().sendMessage(ChatColor.RED + "Your reputation was reset to 0!");
if (0 >= reqVotes && !target.getPlayer().hasPermission("greylistvote.build") && !target.getPlayer().hasPermission("greylistvote.approved")) {
setApproved(target.getPlayer());
}
else if (0 < reqVotes && target.getPlayer().hasPermission("greylistvote.build") && !target.getPlayer().hasPermission("greylistvote.approved")) {
setGriefer(target.getPlayer());
}
} else {
Player[] onlinePlayers = getServer().getOnlinePlayers();
for (Player chatPlayer : onlinePlayers) {
if (!chatPlayer.getName().equals(sender.getName())) {
chatPlayer.sendMessage(target.getName() + ChatColor.GOLD + "'s reputation has been reset to 0!");
}
}
}
this.saveUsersConfig();
return true;
}
else {
sender.sendMessage(ChatColor.RED + "Command not recognised!");
return false;
}
}
return false;
}
else if (commandLabel.equalsIgnoreCase("greylist") || commandLabel.equalsIgnoreCase("gl") || commandLabel.equalsIgnoreCase("trust")) {
if (args.length != 1) {
// No player specified or too many arguments
return false;
}
else {
if (!sender.hasPermission("greylistvote.vote")) {
sender.sendMessage(ChatColor.RED + "You do not have permission to vote!");
return true;
}
OfflinePlayer target = getServer().getOfflinePlayer(args[0]);
if (!target.hasPlayedBefore()) {
// Player has never been seen on this server
sender.sendMessage(args[0] + ChatColor.RED + " not found!");
return false;
}
int reqVotes = this.config.getInt("required_votes");
if (!(sender instanceof Player)) {
// Voter is the console
this.usersConfig.set(target.getName().toLowerCase() + ".votes", "Server");
this.usersConfig.set(target.getName().toLowerCase() + ".griefer", null);
sender.sendMessage(target.getName() + ChatColor.GOLD + "'s reputation was set to " + reqVotes + "!");
Player[] onlinePlayers = getServer().getOnlinePlayers();
for (Player chatPlayer : onlinePlayers) {
if (!chatPlayer.getName().equals(target.getName())) {
chatPlayer.sendMessage(target.getName() + ChatColor.GOLD + "'s reputation was set to " + reqVotes + " by the Server!");
}
else {
target.getPlayer().sendMessage(ChatColor.GOLD + "Your reputation was set to " + reqVotes + " by the Server!");
}
}
if (target.isOnline() && !target.getPlayer().hasPermission("greylistvote.build") && !target.getPlayer().hasPermission("greylistvote.approved")) {
this.setApproved(target.getPlayer());
}
this.saveUsersConfig();
return true;
}
if (sender.getName().equalsIgnoreCase(target.getName())) {
// Player voting for self
sender.sendMessage(ChatColor.RED + "You cannot vote for yourself!");
return true;
}
String griefList = this.usersConfig.getString(target.getName().toLowerCase() + ".griefer", null);
String voteList = this.usersConfig.getString(target.getName().toLowerCase() + ".votes", null);
String[] voteArray = null;
String[] griefArray = null;
if (voteList != null) {
voteArray = voteList.split(",");
}
else {
voteList = "";
}
if (griefList != null) {
griefArray = griefList.split(",");
}
else {
griefList = "";
}
boolean found = false;
if (voteArray != null) {
for (String vote : voteArray) {
if (vote.equalsIgnoreCase(sender.getName())) {
found = true;
}
}
}
if (found) {
// Voter has already voted for this target player
sender.sendMessage(ChatColor.RED + "You have already voted for " + ChatColor.WHITE + target.getName());
return true;
}
if (griefArray != null) {
String newGriefList = null;
for (String vote : griefArray) {
if (!vote.equalsIgnoreCase(sender.getName())) {
if (newGriefList != null) {
newGriefList += "," + vote;
} else {
newGriefList = vote;
}
}
}
if (newGriefList != null) {
usersConfig.set(target.getName().toLowerCase() + ".griefer", newGriefList);
griefArray = newGriefList.split(",");
}
else {
griefArray = null;
usersConfig.set(target.getName().toLowerCase() + ".griefer", null);
}
}
sender.sendMessage(ChatColor.GOLD + "You have increased " + ChatColor.WHITE + target.getName() + ChatColor.GOLD + "'s reputation!");
Player[] onlinePlayers = getServer().getOnlinePlayers();
// Tell everyone about the reputation change
for (Player chatPlayer : onlinePlayers) {
if (!chatPlayer.getName().equals(target.getName()) && !chatPlayer.getName().equals(sender.getName())) {
chatPlayer.sendMessage(sender.getName() + ChatColor.GOLD + " increased " + ChatColor.WHITE + target.getName() + ChatColor.GOLD + "'s reputation!");
}
else if (!chatPlayer.getName().equals(sender.getName())) {
chatPlayer.sendMessage(sender.getName() + ChatColor.GREEN + " increased your reputation!");
chatPlayer.sendMessage(ChatColor.GOLD + "Type " + ChatColor.WHITE + "/votelist" + ChatColor.GOLD + " to check your reputation.");
}
}
if (voteList.equals("")) {
voteList = sender.getName();
}
else {
voteList = voteList + "," + sender.getName();
}
this.usersConfig.set(target.getName().toLowerCase() + ".votes", voteList);
voteArray = voteList.split(",");
int rep = 0;
if (voteArray.length != 0) {
rep += voteArray.length;
}
if (griefArray != null) {
if (griefArray.length != 0) {
rep -= griefArray.length;
}
}
if (target.isOnline() && rep >= reqVotes && !target.getPlayer().hasPermission("greylistvote.build") && !target.getPlayer().hasPermission("greylistvote.approved")) {
// Enough votes received
this.setApproved(target.getPlayer());
}
else if (!target.isOnline() && rep >= reqVotes) {
for (Player chatPlayer : onlinePlayers) {
if (!chatPlayer.getName().equals(target.getName())) {
chatPlayer.sendMessage(target.getName() + ChatColor.GOLD + "'s reputation has reached " + reqVotes + "!");
chatPlayer.sendMessage(target.getName() + ChatColor.GOLD + " can now build!");
}
}
}
this.saveUsersConfig();
return true;
}
}
else if (commandLabel.equalsIgnoreCase("griefer")) {
if (!sender.hasPermission("greylistvote.griefer")) {
sender.sendMessage(ChatColor.RED + "You do not have permission to vote!");
return true;
}
if (args.length != 1) {
// No player specified or too many arguments
return false;
}
else {
OfflinePlayer target = getServer().getOfflinePlayer(args[0]);
if (!target.hasPlayedBefore()) {
// Player has never been seen on this server
sender.sendMessage(args[0] + ChatColor.RED + " not found!");
return false;
}
int reqVotes = this.config.getInt("required_votes");
String griefList = this.usersConfig.getString(target.getName().toLowerCase() + ".griefer", null);
String voteList = this.usersConfig.getString(target.getName().toLowerCase() + ".votes", null);
String[] voteArray = null;
String[] griefArray = null;
if (voteList != null) {
voteArray = voteList.split(",");
}
else {
voteList = "";
}
if (griefList != null) {
griefArray = griefList.split(",");
}
else {
griefList = "";
}
if (!(sender instanceof Player)) {
// Voter is the console
this.usersConfig.set(target.getName().toLowerCase() + ".griefer", "Server");
this.usersConfig.set(target.getName().toLowerCase() + ".votes", null);
sender.sendMessage(target.getName() + ChatColor.GOLD + "'s reputation was set to -1!");
Player[] onlinePlayers = getServer().getOnlinePlayers();
for (Player chatPlayer : onlinePlayers) {
if (!chatPlayer.getName().equals(target.getName())) {
chatPlayer.sendMessage(target.getName() + ChatColor.GOLD + "'s reputation was set to -1 by the Server!");
}
else {
target.getPlayer().sendMessage(ChatColor.GOLD + "Your reputation was set to -1 by the Server!");
}
}
if (target.isOnline() && target.getPlayer().hasPermission("greylistvote.build") && !target.getPlayer().hasPermission("greylistvote.approved")) {
this.setGriefer(target.getPlayer());
}
this.saveUsersConfig();
return true;
}
if (sender.getName().equalsIgnoreCase(target.getName())) {
// Player voting for self
sender.sendMessage(ChatColor.RED + "You cannot vote for yourself!");
return true;
}
boolean found = false;
if (griefArray != null) {
for (String vote : griefArray) {
if (vote.equalsIgnoreCase(sender.getName())) {
found = true;
}
}
}
if (found) {
// Voter has already voted for this target player
sender.sendMessage(ChatColor.RED + "You have already voted for " + ChatColor.WHITE + target.getName());
return true;
}
if (voteArray != null) {
String newVoteList = null;
for (String vote : voteArray) {
if (!vote.equalsIgnoreCase(sender.getName())) {
if (newVoteList != null) {
newVoteList += "," + vote;
} else {
newVoteList = vote;
}
}
}
if (newVoteList != null) {
usersConfig.set(target.getName().toLowerCase() + ".votes", newVoteList);
voteArray = newVoteList.split(",");
}
else {
voteArray = null;
usersConfig.set(target.getName().toLowerCase() + ".votes", null);
}
}
sender.sendMessage(ChatColor.GOLD + "You have reduced " + ChatColor.WHITE + target.getName() + ChatColor.GOLD + "'s reputation!");
Player[] onlinePlayers = getServer().getOnlinePlayers();
for (Player chatPlayer : onlinePlayers) {
if (!chatPlayer.getName().equals(target.getName()) && !chatPlayer.getName().equals(sender.getName())) {
chatPlayer.sendMessage(sender.getName() + ChatColor.GOLD + " reduced " + ChatColor.WHITE + target.getName() + ChatColor.GOLD + "'s reputation!");
}
else if (!chatPlayer.getName().equals(sender.getName())) {
chatPlayer.sendMessage(sender.getName() + ChatColor.RED + " reduced your reputation!");
chatPlayer.sendMessage(ChatColor.GOLD + "Type " + ChatColor.WHITE + "/votelist" + ChatColor.GOLD + " to check your reputation.");
}
}
if (griefList.equals("")) {
griefList = sender.getName();
}
else {
griefList = griefList + "," + sender.getName();
}
this.usersConfig.set(target.getName().toLowerCase() + ".griefer", griefList);
griefArray = griefList.split(",");
int rep = 0;
if (voteArray != null) {
rep += voteArray.length;
}
if (griefArray != null) {
rep -= griefArray.length;
}
if (target.isOnline() && rep < reqVotes && target.getPlayer().hasPermission("greylistvote.build") && !target.getPlayer().hasPermission("greylistvote.approved")) {
// Reputation fell below the threshold
this.setGriefer(target.getPlayer());
}
else if (!target.isOnline() && rep < reqVotes) {
for (Player chatPlayer : onlinePlayers) {
if (!chatPlayer.getName().equals(target.getName())) {
chatPlayer.sendMessage(target.getName() + ChatColor.GOLD + "'s reputation has dropped below " + reqVotes + "!");
}
}
}
this.saveUsersConfig();
return true;
}
}
else if (commandLabel.equalsIgnoreCase("votelist") || commandLabel.equalsIgnoreCase("glvlist")) {
if (args == null || args.length == 0) {
String voteList = this.usersConfig.getString(sender.getName().toLowerCase() + ".votes", null);
String griefList = this.usersConfig.getString(sender.getName().toLowerCase() + ".griefer", null);
int reqVotes = config.getInt("required_votes");
if (voteList == null && griefList == null) {
sender.sendMessage(ChatColor.GOLD + "You have not received any votes.");
sender.sendMessage(ChatColor.GOLD + "Current Reputation: " + ChatColor.WHITE + "0");
sender.sendMessage(ChatColor.GOLD + "Required Reputation: " + ChatColor.WHITE + reqVotes);
}
else {
sender.sendMessage(ChatColor.GOLD + "You have received votes from:");
int reputation = 0;
boolean serverVote = false;
String[] voteArray = null;
String[] griefArray = null;
if (voteList != null) {
voteArray = voteList.split(",");
if (voteArray.length != 0) {
String votes = ChatColor.GREEN + " Approvals: " + ChatColor.GOLD;
for (String vote : voteArray) {
votes = votes + vote + " ";
if (vote.equals("Server")) {
serverVote = true;
}
reputation ++;
}
if (serverVote) {
reputation = reqVotes;
}
sender.sendMessage(votes);
}
}
if (griefList != null) {
griefArray = griefList.split(",");
if (griefArray.length != 0) {
String votes = ChatColor.DARK_GRAY + " Black-Balls: " + ChatColor.GOLD;
serverVote = false;
for (String vote : griefArray) {
votes = votes + vote + " ";
if (vote.equals("Server")) {
serverVote = true;
}
reputation--;
}
if (serverVote) {
reputation = -1;
}
sender.sendMessage(votes);
}
}
String repText = "";
if (reputation >= reqVotes) {
repText = " " + ChatColor.GREEN + reputation;
}
else {
repText = " " + ChatColor.RED + reputation;
}
sender.sendMessage(ChatColor.GOLD + "Current Reputation:" + repText);
sender.sendMessage(ChatColor.GOLD + "Required Reputation: " + ChatColor.WHITE + reqVotes);
}
return true;
}
else {
OfflinePlayer checktarget = getServer().getOfflinePlayer(args[0]);
String DN = null;
String target = null;
if (checktarget.isOnline()) {
target = checktarget.getPlayer().getName();
DN = checktarget.getPlayer().getDisplayName();
}
else {
if (checktarget != null) {
target = checktarget.getName();
DN = checktarget.getName();
}
}
if (target == null) {
// Player not found
sender.sendMessage(args[0] + ChatColor.RED + " not found!");
return false;
}
String voteList = this.usersConfig.getString(target.toLowerCase() + ".votes", null);
String griefList = this.usersConfig.getString(target.toLowerCase() + ".griefer", null);
int reqVotes = config.getInt("required_votes");
if (voteList == null && griefList == null) {
sender.sendMessage(DN + ChatColor.GOLD + " has not received any votes.");
sender.sendMessage(ChatColor.GOLD + "Current Reputation: " + ChatColor.WHITE + "0");
sender.sendMessage(ChatColor.GOLD + "Required Reputation: " + ChatColor.WHITE + reqVotes);
}
else {
sender.sendMessage(DN + ChatColor.GOLD + " has received votes from:");
int reputation = 0;
boolean serverVote = false;
String[] voteArray = null;
String[] griefArray = null;
if (voteList != null) {
voteArray = voteList.split(",");
if (voteArray.length != 0) {
String votes = ChatColor.GREEN + " Approvals: " + ChatColor.GOLD;
for (String vote : voteArray) {
votes = votes + vote + " ";
if (vote.equals("Server")) {
serverVote = true;
}
reputation ++;
}
if (serverVote) {
reputation = reqVotes;
}
sender.sendMessage(votes);
}
}
if (griefList != null) {
griefArray = griefList.split(",");
if (griefArray.length != 0) {
String votes = ChatColor.DARK_GRAY + " Black-Balls: " + ChatColor.GOLD;
serverVote = false;
for (String vote : griefArray) {
votes = votes + vote + " ";
if (vote.equals("Server")) {
serverVote = true;
}
reputation--;
}
if (serverVote) {
reputation = -1;
}
sender.sendMessage(votes);
}
}
String repText = "";
if (reputation >= reqVotes) {
repText = " " + ChatColor.GREEN + reputation;
}
else {
repText = " " + ChatColor.RED + reputation;
}
sender.sendMessage(ChatColor.GOLD + "Current Reputation:" + repText);
sender.sendMessage(ChatColor.GOLD + "Required Reputation: " + ChatColor.WHITE + reqVotes);
}
return true;
}
}
return false;
}
|
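The behavioral delta between the buggy and fixed onCommand above is confined to how the comma-separated vote lists are rebuilt when a voter's earlier opposite vote must be removed: the buggy version appends to a String initialized to null, and Java string concatenation converts that null reference to the literal text "null". A minimal standalone demonstration of the failure mode:

public class NullConcatDemo {
    public static void main(String[] args) {
        String list = null;
        list += ",Alice"; // Java stringifies the null reference
        System.out.println(list); // prints "null,Alice"
        System.out.println(list.replaceFirst(",", "")); // prints "nullAlice", not "Alice"
    }
}

As the second println shows, a replaceFirst(",", "") cleanup strips the separator rather than the bogus prefix, so the corrupted name would be written to users.yml. The fixed version assigns the first surviving vote directly and only prepends commas for later entries, which removes the need for any post-hoc separator cleanup.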
diff --git a/src/main/java/org/spout/vanilla/inventory/util/GridInventoryConverter.java b/src/main/java/org/spout/vanilla/inventory/util/GridInventoryConverter.java
index 25cc2cfa..75a8cdd2 100644
--- a/src/main/java/org/spout/vanilla/inventory/util/GridInventoryConverter.java
+++ b/src/main/java/org/spout/vanilla/inventory/util/GridInventoryConverter.java
@@ -1,66 +1,62 @@
/*
* This file is part of Vanilla.
*
* Copyright (c) 2011-2012, VanillaDev <http://www.spout.org/>
* Vanilla is licensed under the SpoutDev License Version 1.
*
* Vanilla is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* In addition, 180 days after any changes are published, you can use the
* software, incorporating those changes, under the terms of the MIT license,
* as described in the SpoutDev License Version 1.
*
* Vanilla is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License,
* the MIT license and the SpoutDev License Version 1 along with this program.
* If not, see <http://www.gnu.org/licenses/> for the GNU Lesser General Public
* License and see <http://www.spout.org/SpoutDevLicenseV1.txt> for the full license,
* including the MIT license.
*/
package org.spout.vanilla.inventory.util;
import org.spout.api.inventory.Inventory;
import org.spout.api.inventory.util.Grid;
import org.spout.api.inventory.util.GridIterator;
public class GridInventoryConverter extends InventoryConverter {
private final Grid grid;
private final int offset;
public GridInventoryConverter(Inventory inventory, int length, int offset) {
super(inventory, new int[inventory.getGrid(length).getSize()]);
grid = inventory.getGrid(length);
this.offset = offset;
GridIterator i = grid.iterator();
- int mod = offset * grid.getWidth();
while (i.hasNext()) {
- slots[i.next()] = i.getX() + mod;
- if (i.getX() == grid.getLength() - 1) {
- mod -= offset;
- }
+ slots[i.next()] = ((offset + grid.getSize()) - (length * i.getY())) - (length - i.getX());
}
}
public GridInventoryConverter(Inventory inventory, int length) {
this(inventory, length, 0);
}
public GridInventoryConverter translate(int offset) {
return new GridInventoryConverter(inventory, grid.getLength(), this.offset + offset);
}
public Grid getGrid() {
return grid;
}
public int getOffset() {
return offset;
}
}
| false | true | public GridInventoryConverter(Inventory inventory, int length, int offset) {
super(inventory, new int[inventory.getGrid(length).getSize()]);
grid = inventory.getGrid(length);
this.offset = offset;
GridIterator i = grid.iterator();
int mod = offset * grid.getWidth();
while (i.hasNext()) {
slots[i.next()] = i.getX() + mod;
if (i.getX() == grid.getLength() - 1) {
mod -= offset;
}
}
}
| public GridInventoryConverter(Inventory inventory, int length, int offset) {
super(inventory, new int[inventory.getGrid(length).getSize()]);
grid = inventory.getGrid(length);
this.offset = offset;
GridIterator i = grid.iterator();
while (i.hasNext()) {
slots[i.next()] = ((offset + grid.getSize()) - (length * i.getY())) - (length - i.getX());
}
}
|
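The fix in this GridInventoryConverter record replaces the stateful mod bookkeeping with a closed-form index map: slot (x, y) of a grid whose rows are length slots wide lands at ((offset + size) - length * y) - (length - x), i.e. rows are mirrored vertically while column order is preserved. The standalone sketch below prints that mapping without any Spout types; the 3x3 grid and offset of 9 are made-up example values.

public class GridMappingDemo {
    public static void main(String[] args) {
        int length = 3, height = 3, offset = 9; // hypothetical 3x3 grid
        int size = length * height;
        for (int y = 0; y < height; y++) {
            for (int x = 0; x < length; x++) {
                // Same arithmetic as the fixed GridInventoryConverter above.
                int slot = ((offset + size) - (length * y)) - (length - x);
                System.out.printf("(%d,%d) -> %d%n", x, y, slot);
            }
        }
        // Row y=0 maps to slots 15..17, y=1 to 12..14, y=2 to 9..11:
        // the grid is flipped top-to-bottom while x keeps its order.
    }
}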
diff --git a/blocks/sublima-app/src/main/java/com/computas/sublima/app/controller/admin/AdminController.java b/blocks/sublima-app/src/main/java/com/computas/sublima/app/controller/admin/AdminController.java
index 11299413..8975ccee 100644
--- a/blocks/sublima-app/src/main/java/com/computas/sublima/app/controller/admin/AdminController.java
+++ b/blocks/sublima-app/src/main/java/com/computas/sublima/app/controller/admin/AdminController.java
@@ -1,194 +1,194 @@
package com.computas.sublima.app.controller.admin;
import com.computas.sublima.app.adhoc.ConvertSublimaResources;
import com.computas.sublima.app.adhoc.ImportData;
import com.computas.sublima.app.service.AdminService;
import com.computas.sublima.app.service.IndexService;
import com.computas.sublima.app.service.LanguageService;
import com.computas.sublima.query.SparulDispatcher;
import com.computas.sublima.query.service.SettingsService;
import com.hp.hpl.jena.rdf.model.Model;
import com.hp.hpl.jena.rdf.model.ModelFactory;
import com.hp.hpl.jena.sparql.util.StringUtils;
import org.apache.cocoon.auth.ApplicationUtil;
import org.apache.cocoon.auth.User;
import org.apache.cocoon.components.flow.apples.AppleRequest;
import org.apache.cocoon.components.flow.apples.AppleResponse;
import org.apache.cocoon.components.flow.apples.StatelessAppleController;
import org.apache.log4j.Logger;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.net.URLEncoder;
import java.net.URL;
import java.net.HttpURLConnection;
import java.util.HashMap;
import java.util.Map;
/**
* @author: mha
* Date: 31.mar.2008
*/
public class AdminController implements StatelessAppleController {
private SparulDispatcher sparulDispatcher;
AdminService adminService = new AdminService();
private ApplicationUtil appUtil = new ApplicationUtil();
private User user;
private String userPrivileges = "<empty/>";
String[] completePrefixArray = {
"PREFIX dct: <http://purl.org/dc/terms/>",
"PREFIX foaf: <http://xmlns.com/foaf/0.1/>",
"PREFIX sub: <http://xmlns.computas.com/sublima#>",
"PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>",
"PREFIX wdr: <http://www.w3.org/2007/05/powder#>",
"PREFIX skos: <http://www.w3.org/2004/02/skos/core#>",
"PREFIX lingvoj: <http://www.lingvoj.org/ontology#>"};
String completePrefixes = StringUtils.join("\n", completePrefixArray);
private static Logger logger = Logger.getLogger(AdminController.class);
ConvertSublimaResources convert = new ConvertSublimaResources();
@SuppressWarnings("unchecked")
public void process(AppleRequest req, AppleResponse res) throws Exception {
String mode = req.getSitemapParameter("mode");
String submode = req.getSitemapParameter("submode");
if (appUtil.getUser() != null) {
user = appUtil.getUser();
userPrivileges = adminService.getRolePrivilegesAsXML(user.getAttribute("role").toString());
}
LanguageService langServ = new LanguageService();
String language = langServ.checkLanguage(req, res);
logger.trace("AdminController: Language from sitemap is " + req.getSitemapParameter("interface-language"));
logger.trace("AdminController: Language from service is " + language);
if ("".equalsIgnoreCase(mode)) {
Map<String, Object> bizData = new HashMap<String, Object>();
bizData.put("facets", adminService.getMostOfTheRequestXMLWithPrefix(req) + "</c:request>");
System.gc();
res.sendPage("xml2/admin", bizData);
} else if ("testsparql".equalsIgnoreCase(mode)) {
if ("".equalsIgnoreCase(submode)) {
System.gc();
res.sendPage("xhtml/testsparql", null);
} else {
String query = req.getCocoonRequest().getParameter("query");
res.redirectTo(req.getCocoonRequest().getContextPath() + "/sparql?query=" + URLEncoder.encode(query, "UTF-8"));
}
} else if ("testsparul".equalsIgnoreCase(mode)) {
if ("".equalsIgnoreCase(submode)) {
System.gc();
res.sendPage("xhtml/testsparul", null);
} else {
String query = req.getCocoonRequest().getParameter("query");
boolean deleteResourceSuccess = sparulDispatcher.query(query);
logger.trace("TestSparul:\n" + query);
logger.trace("TestSparul result: " + deleteResourceSuccess);
System.gc();
res.sendPage("xhtml/testsparul", null);
}
} else if ("database".equalsIgnoreCase(mode)) {
if ("".equalsIgnoreCase(submode)) {
uploadForm(res, req);
} else if ("upload".equalsIgnoreCase(submode)) {
uploadForm(res, req);
} else if ("export".equalsIgnoreCase(submode)) {
exportOntologyToXML(res, req);
}
} else if ("index".equalsIgnoreCase(mode)) {
- if ("".equals(submode)) {
+ if ("index".equals(submode)) {
if (req.getCocoonRequest().getMethod().equalsIgnoreCase("POST")) {
index(res, req);
} else if (req.getCocoonRequest().getMethod().equalsIgnoreCase("GET")) {
showIndexStatus(res, req);
}
}
} else {
res.sendStatus(404);
}
}
private void showIndexStatus(AppleResponse res, AppleRequest req) {
// Les indexstatistikk fra databasen
Map<String, Object> bizData = new HashMap<String, Object>();
bizData.put("index", adminService.getIndexStatisticsAsXML());
bizData.put("facets", adminService.getMostOfTheRequestXMLWithPrefix(req) + "</c:request>");
bizData.put("userprivileges", userPrivileges);
res.sendPage("xml2/index", bizData);
}
private void index(AppleResponse res, AppleRequest req) {
IndexService is = new IndexService();
is.createResourceIndex();
is.createTopicIndex();
showIndexStatus(res, req);
}
private void exportOntologyToXML(AppleResponse res, AppleRequest req) throws Exception {
adminService.insertSubjectOf();
Map<String, Object> bizData = new HashMap<String, Object>();
String type = req.getCocoonRequest().getParameter("type");
String query ="CONSTRUCT {?s ?p ?o} FROM <" + SettingsService.getProperty("sublima.basegraph") + "> WHERE {?s ?p ?o}";
String url = SettingsService.getProperty("sublima.sparql.endpoint") + "?query=" + URLEncoder.encode(query, "UTF-8");
URL u = new URL(url);
HttpURLConnection con = (HttpURLConnection) u.openConnection();
Model model = ModelFactory.createDefaultModel();
ByteArrayOutputStream out = new ByteArrayOutputStream();
model.read(con.getInputStream(), "");
model.write(out, type);
bizData.put("ontology", out.toString());
out.close();
model.close();
System.gc();
res.sendPage("nostyle/export", bizData);
adminService.deleteSubjectOf();
}
private void uploadForm(AppleResponse res, AppleRequest req) {
Map<String, Object> bizData = new HashMap<String, Object>();
bizData.put("facets", adminService.getMostOfTheRequestXMLWithPrefix(req) + "</c:request>");
bizData.put("userprivileges", userPrivileges);
if (req.getCocoonRequest().getMethod().equalsIgnoreCase("GET")) {
System.gc();
res.sendPage("xml2/upload", bizData);
} else if (req.getCocoonRequest().getMethod().equalsIgnoreCase("POST")) {
if (req.getCocoonRequest().getParameter("location") != null) {
String type = req.getCocoonRequest().getParameter("type");
File file = new File(req.getCocoonRequest().getParameter("location"));
ImportData id = new ImportData();
try {
ConvertSublimaResources.applyRules(file.toURL().toString(), type, file.getCanonicalPath(), type);
id.load(file.toURL().toString(), type);
} catch (Exception e) {
logger.trace("AdminController.uploadForm --> Error during loading of resource");
e.printStackTrace();
}
}
System.gc();
res.sendPage("xml2/upload", bizData);
adminService.deleteSubjectOf();
}
}
public void setSparulDispatcher(SparulDispatcher sparulDispatcher) {
this.sparulDispatcher = sparulDispatcher;
}
}
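The functional change in this AdminController record is the submode guard in the index branch: the buggy copy below runs the indexer whenever submode is the empty string, while the fixed copy requires an explicit submode of "index". A small sketch of the two guards side by side (the submode values are examples, not taken from the sitemap):

public class SubmodeGuardDemo {
    public static void main(String[] args) {
        for (String submode : new String[] {"", "index"}) {
            boolean oldGuard = "".equals(submode);      // buggy: fires on empty submode
            boolean newGuard = "index".equals(submode); // fixed: fires only on "index"
            System.out.printf("submode=\"%s\" old=%b new=%b%n", submode, oldGuard, newGuard);
        }
    }
}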
| true | true | public void process(AppleRequest req, AppleResponse res) throws Exception {
String mode = req.getSitemapParameter("mode");
String submode = req.getSitemapParameter("submode");
if (appUtil.getUser() != null) {
user = appUtil.getUser();
userPrivileges = adminService.getRolePrivilegesAsXML(user.getAttribute("role").toString());
}
LanguageService langServ = new LanguageService();
String language = langServ.checkLanguage(req, res);
logger.trace("AdminController: Language from sitemap is " + req.getSitemapParameter("interface-language"));
logger.trace("AdminController: Language from service is " + language);
if ("".equalsIgnoreCase(mode)) {
Map<String, Object> bizData = new HashMap<String, Object>();
bizData.put("facets", adminService.getMostOfTheRequestXMLWithPrefix(req) + "</c:request>");
System.gc();
res.sendPage("xml2/admin", bizData);
} else if ("testsparql".equalsIgnoreCase(mode)) {
if ("".equalsIgnoreCase(submode)) {
System.gc();
res.sendPage("xhtml/testsparql", null);
} else {
String query = req.getCocoonRequest().getParameter("query");
res.redirectTo(req.getCocoonRequest().getContextPath() + "/sparql?query=" + URLEncoder.encode(query, "UTF-8"));
}
} else if ("testsparul".equalsIgnoreCase(mode)) {
if ("".equalsIgnoreCase(submode)) {
System.gc();
res.sendPage("xhtml/testsparul", null);
} else {
String query = req.getCocoonRequest().getParameter("query");
boolean deleteResourceSuccess = sparulDispatcher.query(query);
logger.trace("TestSparul:\n" + query);
logger.trace("TestSparul result: " + deleteResourceSuccess);
System.gc();
res.sendPage("xhtml/testsparul", null);
}
} else if ("database".equalsIgnoreCase(mode)) {
if ("".equalsIgnoreCase(submode)) {
uploadForm(res, req);
} else if ("upload".equalsIgnoreCase(submode)) {
uploadForm(res, req);
} else if ("export".equalsIgnoreCase(submode)) {
exportOntologyToXML(res, req);
}
} else if ("index".equalsIgnoreCase(mode)) {
if ("".equals(submode)) {
if (req.getCocoonRequest().getMethod().equalsIgnoreCase("POST")) {
index(res, req);
} else if (req.getCocoonRequest().getMethod().equalsIgnoreCase("GET")) {
showIndexStatus(res, req);
}
}
} else {
res.sendStatus(404);
}
}
| public void process(AppleRequest req, AppleResponse res) throws Exception {
String mode = req.getSitemapParameter("mode");
String submode = req.getSitemapParameter("submode");
if (appUtil.getUser() != null) {
user = appUtil.getUser();
userPrivileges = adminService.getRolePrivilegesAsXML(user.getAttribute("role").toString());
}
LanguageService langServ = new LanguageService();
String language = langServ.checkLanguage(req, res);
logger.trace("AdminController: Language from sitemap is " + req.getSitemapParameter("interface-language"));
logger.trace("AdminController: Language from service is " + language);
if ("".equalsIgnoreCase(mode)) {
Map<String, Object> bizData = new HashMap<String, Object>();
bizData.put("facets", adminService.getMostOfTheRequestXMLWithPrefix(req) + "</c:request>");
System.gc();
res.sendPage("xml2/admin", bizData);
} else if ("testsparql".equalsIgnoreCase(mode)) {
if ("".equalsIgnoreCase(submode)) {
System.gc();
res.sendPage("xhtml/testsparql", null);
} else {
String query = req.getCocoonRequest().getParameter("query");
res.redirectTo(req.getCocoonRequest().getContextPath() + "/sparql?query=" + URLEncoder.encode(query, "UTF-8"));
}
} else if ("testsparul".equalsIgnoreCase(mode)) {
if ("".equalsIgnoreCase(submode)) {
System.gc();
res.sendPage("xhtml/testsparul", null);
} else {
String query = req.getCocoonRequest().getParameter("query");
boolean deleteResourceSuccess = sparulDispatcher.query(query);
logger.trace("TestSparul:\n" + query);
logger.trace("TestSparul result: " + deleteResourceSuccess);
System.gc();
res.sendPage("xhtml/testsparul", null);
}
} else if ("database".equalsIgnoreCase(mode)) {
if ("".equalsIgnoreCase(submode)) {
uploadForm(res, req);
} else if ("upload".equalsIgnoreCase(submode)) {
uploadForm(res, req);
} else if ("export".equalsIgnoreCase(submode)) {
exportOntologyToXML(res, req);
}
} else if ("index".equalsIgnoreCase(mode)) {
if ("index".equals(submode)) {
if (req.getCocoonRequest().getMethod().equalsIgnoreCase("POST")) {
index(res, req);
} else if (req.getCocoonRequest().getMethod().equalsIgnoreCase("GET")) {
showIndexStatus(res, req);
}
}
} else {
res.sendStatus(404);
}
}
|
diff --git a/src/com/android/settings/wifi/WifiConfigController.java b/src/com/android/settings/wifi/WifiConfigController.java
index 3efedf604..0d2429886 100644
--- a/src/com/android/settings/wifi/WifiConfigController.java
+++ b/src/com/android/settings/wifi/WifiConfigController.java
@@ -1,733 +1,732 @@
/*
* Copyright (C) 2010 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.android.settings.wifi;
import android.content.Context;
import android.content.DialogInterface;
import android.content.res.Resources;
import android.net.DhcpInfo;
import android.net.LinkAddress;
import android.net.LinkProperties;
import android.net.NetworkInfo.DetailedState;
import android.net.NetworkUtils;
import android.net.Proxy;
import android.net.ProxyProperties;
import android.net.wifi.WifiConfiguration;
import android.net.wifi.WifiConfiguration.IpAssignment;
import android.net.wifi.WifiConfiguration.AuthAlgorithm;
import android.net.wifi.WifiConfiguration.KeyMgmt;
import android.net.wifi.WpsConfiguration;
import android.net.wifi.WpsConfiguration.Setup;
import static android.net.wifi.WifiConfiguration.INVALID_NETWORK_ID;
import android.net.wifi.WifiConfiguration.ProxySettings;
import android.net.wifi.WifiInfo;
import android.security.Credentials;
import android.security.KeyStore;
import android.text.Editable;
import android.text.InputType;
import android.text.TextWatcher;
import android.text.format.Formatter;
import android.util.Log;
import android.view.View;
import android.view.ViewGroup;
import android.widget.AdapterView;
import android.widget.ArrayAdapter;
import android.widget.CheckBox;
import android.widget.Spinner;
import android.widget.TextView;
import android.widget.Toast;
import com.android.settings.ProxySelector;
import com.android.settings.R;
import java.net.InetAddress;
import java.net.InetSocketAddress;
import java.net.UnknownHostException;
import java.util.Iterator;
/**
* The class for allowing UIs like {@link WifiDialog} and {@link WifiConfigPreference} to
* share the logic for controlling buttons, text fields, etc.
*/
public class WifiConfigController implements TextWatcher,
View.OnClickListener, AdapterView.OnItemSelectedListener {
private static final String KEYSTORE_SPACE = "keystore://";
private final WifiConfigUiBase mConfigUi;
private final View mView;
private final AccessPoint mAccessPoint;
private boolean mEdit;
private TextView mSsidView;
// e.g. AccessPoint.SECURITY_NONE
private int mAccessPointSecurity;
private TextView mPasswordView;
private Spinner mSecuritySpinner;
private Spinner mEapMethodSpinner;
private Spinner mEapCaCertSpinner;
private Spinner mPhase2Spinner;
private Spinner mEapUserCertSpinner;
private TextView mEapIdentityView;
private TextView mEapAnonymousView;
/* This value comes from "wifi_ip_settings" resource array */
private static final int DHCP = 0;
private static final int STATIC_IP = 1;
/* These values come from "wifi_network_setup" resource array */
public static final int MANUAL = 0;
public static final int WPS_PBC = 1;
public static final int WPS_PIN_FROM_ACCESS_POINT = 2;
public static final int WPS_PIN_FROM_DEVICE = 3;
/* These values come from "wifi_proxy_settings" resource array */
public static final int PROXY_NONE = 0;
public static final int PROXY_STATIC = 1;
private static final String TAG = "WifiConfigController";
private Spinner mNetworkSetupSpinner;
private Spinner mIpSettingsSpinner;
private TextView mIpAddressView;
private TextView mGatewayView;
private TextView mNetworkPrefixLengthView;
private TextView mDns1View;
private TextView mDns2View;
private Spinner mProxySettingsSpinner;
private TextView mProxyHostView;
private TextView mProxyPortView;
private TextView mProxyExclusionListView;
private IpAssignment mIpAssignment;
private ProxySettings mProxySettings;
private LinkProperties mLinkProperties = new LinkProperties();
// True when this instance is used in SetupWizard XL context.
private final boolean mInXlSetupWizard;
static boolean requireKeyStore(WifiConfiguration config) {
if (config == null) {
return false;
}
String values[] = {config.ca_cert.value(), config.client_cert.value(),
config.private_key.value()};
for (String value : values) {
if (value != null && value.startsWith(KEYSTORE_SPACE)) {
return true;
}
}
return false;
}
public WifiConfigController(
WifiConfigUiBase parent, View view, AccessPoint accessPoint, boolean edit) {
mConfigUi = parent;
mInXlSetupWizard = (parent instanceof WifiConfigUiForSetupWizardXL);
mView = view;
mAccessPoint = accessPoint;
mAccessPointSecurity = (accessPoint == null) ? AccessPoint.SECURITY_NONE :
accessPoint.security;
mEdit = edit;
final Context context = mConfigUi.getContext();
final Resources resources = context.getResources();
if (mAccessPoint == null) { // new network
mConfigUi.setTitle(R.string.wifi_add_network);
mSsidView = (TextView) mView.findViewById(R.id.ssid);
mSsidView.addTextChangedListener(this);
mSecuritySpinner = ((Spinner) mView.findViewById(R.id.security));
mSecuritySpinner.setOnItemSelectedListener(this);
if (mInXlSetupWizard) {
mView.findViewById(R.id.type_ssid).setVisibility(View.VISIBLE);
mView.findViewById(R.id.type_security).setVisibility(View.VISIBLE);
                // We want a custom layout. The content must be the same as in the other cases.
mSecuritySpinner.setAdapter(
new ArrayAdapter<String>(context, R.layout.wifi_setup_custom_list_item_1,
android.R.id.text1,
context.getResources().getStringArray(R.array.wifi_security)));
} else {
mView.findViewById(R.id.type).setVisibility(View.VISIBLE);
}
mConfigUi.setSubmitButton(context.getString(R.string.wifi_save));
} else {
mConfigUi.setTitle(mAccessPoint.ssid);
mIpSettingsSpinner = (Spinner) mView.findViewById(R.id.ip_settings);
mIpSettingsSpinner.setOnItemSelectedListener(this);
mProxySettingsSpinner = (Spinner) mView.findViewById(R.id.proxy_settings);
mProxySettingsSpinner.setOnItemSelectedListener(this);
ViewGroup group = (ViewGroup) mView.findViewById(R.id.info);
DetailedState state = mAccessPoint.getState();
if (state != null) {
addRow(group, R.string.wifi_status, Summary.get(mConfigUi.getContext(), state));
}
String[] type = resources.getStringArray(R.array.wifi_security);
addRow(group, R.string.wifi_security, type[mAccessPoint.security]);
int level = mAccessPoint.getLevel();
if (level != -1) {
String[] signal = resources.getStringArray(R.array.wifi_signal);
addRow(group, R.string.wifi_signal, signal[level]);
}
WifiInfo info = mAccessPoint.getInfo();
if (info != null) {
addRow(group, R.string.wifi_speed, info.getLinkSpeed() + WifiInfo.LINK_SPEED_UNITS);
- // TODO: fix the ip address for IPv6.
- int address = info.getIpAddress();
- if (address != 0) {
- addRow(group, R.string.wifi_ip_address, Formatter.formatIpAddress(address));
- }
}
if (mAccessPoint.networkId != INVALID_NETWORK_ID) {
WifiConfiguration config = mAccessPoint.getConfig();
if (config.ipAssignment == IpAssignment.STATIC) {
mIpSettingsSpinner.setSelection(STATIC_IP);
} else {
mIpSettingsSpinner.setSelection(DHCP);
+ //Display IP addresses
+ for(InetAddress a : config.linkProperties.getAddresses()) {
+ addRow(group, R.string.wifi_ip_address, a.getHostAddress());
+ }
}
if (config.proxySettings == ProxySettings.STATIC) {
mProxySettingsSpinner.setSelection(PROXY_STATIC);
} else {
mProxySettingsSpinner.setSelection(PROXY_NONE);
}
}
/* Show network setup options only for a new network */
if (mAccessPoint.networkId == INVALID_NETWORK_ID && mAccessPoint.wpsAvailable) {
showNetworkSetupFields();
}
if (mAccessPoint.networkId == INVALID_NETWORK_ID || mEdit) {
showSecurityFields();
showIpConfigFields();
showProxyFields();
}
if (mEdit) {
mConfigUi.setSubmitButton(context.getString(R.string.wifi_save));
} else {
if (state == null && level != -1) {
mConfigUi.setSubmitButton(context.getString(R.string.wifi_connect));
} else {
mView.findViewById(R.id.ip_fields).setVisibility(View.GONE);
}
if (mAccessPoint.networkId != INVALID_NETWORK_ID) {
mConfigUi.setForgetButton(context.getString(R.string.wifi_forget));
}
}
}
mConfigUi.setCancelButton(context.getString(R.string.wifi_cancel));
if (mConfigUi.getSubmitButton() != null) {
enableSubmitIfAppropriate();
}
}
private void addRow(ViewGroup group, int name, String value) {
View row = mConfigUi.getLayoutInflater().inflate(R.layout.wifi_dialog_row, group, false);
((TextView) row.findViewById(R.id.name)).setText(name);
((TextView) row.findViewById(R.id.value)).setText(value);
group.addView(row);
}
/* show submit button if the password is valid */
private void enableSubmitIfAppropriate() {
if ((mSsidView != null && mSsidView.length() == 0) ||
((mAccessPoint == null || mAccessPoint.networkId == INVALID_NETWORK_ID) &&
((mAccessPointSecurity == AccessPoint.SECURITY_WEP && mPasswordView.length() == 0) ||
(mAccessPointSecurity == AccessPoint.SECURITY_PSK && mPasswordView.length() < 8)))) {
mConfigUi.getSubmitButton().setEnabled(false);
} else {
mConfigUi.getSubmitButton().setEnabled(true);
}
}
/* package */ WifiConfiguration getConfig() {
if (mAccessPoint != null && mAccessPoint.networkId != INVALID_NETWORK_ID && !mEdit) {
return null;
}
WifiConfiguration config = new WifiConfiguration();
if (mAccessPoint == null) {
config.SSID = AccessPoint.convertToQuotedString(
mSsidView.getText().toString());
// If the user adds a network manually, assume that it is hidden.
config.hiddenSSID = true;
} else if (mAccessPoint.networkId == INVALID_NETWORK_ID) {
config.SSID = AccessPoint.convertToQuotedString(
mAccessPoint.ssid);
} else {
config.networkId = mAccessPoint.networkId;
}
switch (mAccessPointSecurity) {
case AccessPoint.SECURITY_NONE:
config.allowedKeyManagement.set(KeyMgmt.NONE);
break;
case AccessPoint.SECURITY_WEP:
config.allowedKeyManagement.set(KeyMgmt.NONE);
config.allowedAuthAlgorithms.set(AuthAlgorithm.OPEN);
config.allowedAuthAlgorithms.set(AuthAlgorithm.SHARED);
if (mPasswordView.length() != 0) {
int length = mPasswordView.length();
String password = mPasswordView.getText().toString();
// WEP-40, WEP-104, and 256-bit WEP (WEP-232?)
if ((length == 10 || length == 26 || length == 58) &&
password.matches("[0-9A-Fa-f]*")) {
config.wepKeys[0] = password;
} else {
config.wepKeys[0] = '"' + password + '"';
}
}
break;
case AccessPoint.SECURITY_PSK:
config.allowedKeyManagement.set(KeyMgmt.WPA_PSK);
if (mPasswordView.length() != 0) {
String password = mPasswordView.getText().toString();
if (password.matches("[0-9A-Fa-f]{64}")) {
config.preSharedKey = password;
} else {
config.preSharedKey = '"' + password + '"';
}
}
break;
case AccessPoint.SECURITY_EAP:
config.allowedKeyManagement.set(KeyMgmt.WPA_EAP);
config.allowedKeyManagement.set(KeyMgmt.IEEE8021X);
config.eap.setValue((String) mEapMethodSpinner.getSelectedItem());
config.phase2.setValue((mPhase2Spinner.getSelectedItemPosition() == 0) ? "" :
"auth=" + mPhase2Spinner.getSelectedItem());
config.ca_cert.setValue((mEapCaCertSpinner.getSelectedItemPosition() == 0) ? "" :
KEYSTORE_SPACE + Credentials.CA_CERTIFICATE +
(String) mEapCaCertSpinner.getSelectedItem());
config.client_cert.setValue((mEapUserCertSpinner.getSelectedItemPosition() == 0) ?
"" : KEYSTORE_SPACE + Credentials.USER_CERTIFICATE +
(String) mEapUserCertSpinner.getSelectedItem());
config.private_key.setValue((mEapUserCertSpinner.getSelectedItemPosition() == 0) ?
"" : KEYSTORE_SPACE + Credentials.USER_PRIVATE_KEY +
(String) mEapUserCertSpinner.getSelectedItem());
config.identity.setValue((mEapIdentityView.length() == 0) ? "" :
mEapIdentityView.getText().toString());
config.anonymous_identity.setValue((mEapAnonymousView.length() == 0) ? "" :
mEapAnonymousView.getText().toString());
if (mPasswordView.length() != 0) {
config.password.setValue(mPasswordView.getText().toString());
}
break;
default:
return null;
}
validateAndFetchIpAndProxyFields();
config.proxySettings = mProxySettings;
config.ipAssignment = mIpAssignment;
config.linkProperties = new LinkProperties(mLinkProperties);
return config;
}
private void validateAndFetchIpAndProxyFields() {
mLinkProperties.clear();
mIpAssignment = (mIpSettingsSpinner != null &&
mIpSettingsSpinner.getSelectedItemPosition() == STATIC_IP) ?
IpAssignment.STATIC : IpAssignment.DHCP;
if (mIpAssignment == IpAssignment.STATIC) {
//TODO: A better way to do this is to not dismiss the
//dialog as long as one of the fields is invalid
int result = validateIpConfigFields(mLinkProperties);
if (result != 0) {
mLinkProperties.clear();
Toast.makeText(mConfigUi.getContext(), result, Toast.LENGTH_LONG).show();
mIpAssignment = IpAssignment.UNASSIGNED;
}
}
mProxySettings = (mProxySettingsSpinner != null &&
mProxySettingsSpinner.getSelectedItemPosition() == PROXY_STATIC) ?
ProxySettings.STATIC : ProxySettings.NONE;
if (mProxySettings == ProxySettings.STATIC) {
String host = mProxyHostView.getText().toString();
String portStr = mProxyPortView.getText().toString();
String exclusionList = mProxyExclusionListView.getText().toString();
int port = 0;
int result = 0;
try {
port = Integer.parseInt(portStr);
result = ProxySelector.validate(host, portStr, exclusionList);
} catch (NumberFormatException e) {
result = R.string.proxy_error_invalid_port;
}
if (result == 0) {
            ProxyProperties proxyProperties = new ProxyProperties(host, port, exclusionList);
mLinkProperties.setHttpProxy(proxyProperties);
} else {
Toast.makeText(mConfigUi.getContext(), result, Toast.LENGTH_LONG).show();
mProxySettings = ProxySettings.UNASSIGNED;
}
}
}
private int validateIpConfigFields(LinkProperties linkProperties) {
String ipAddr = mIpAddressView.getText().toString();
InetAddress inetAddr = null;
try {
inetAddr = NetworkUtils.numericToInetAddress(ipAddr);
} catch (IllegalArgumentException e) {
return R.string.wifi_ip_settings_invalid_ip_address;
}
int networkPrefixLength = -1;
try {
networkPrefixLength = Integer.parseInt(mNetworkPrefixLengthView.getText().toString());
} catch (NumberFormatException e) { }
if (networkPrefixLength < 0 || networkPrefixLength > 32) {
return R.string.wifi_ip_settings_invalid_network_prefix_length;
}
linkProperties.addLinkAddress(new LinkAddress(inetAddr, networkPrefixLength));
String gateway = mGatewayView.getText().toString();
InetAddress gatewayAddr = null;
try {
gatewayAddr = NetworkUtils.numericToInetAddress(gateway);
} catch (IllegalArgumentException e) {
return R.string.wifi_ip_settings_invalid_gateway;
}
linkProperties.addGateway(gatewayAddr);
String dns = mDns1View.getText().toString();
InetAddress dnsAddr = null;
try {
dnsAddr = NetworkUtils.numericToInetAddress(dns);
} catch (IllegalArgumentException e) {
return R.string.wifi_ip_settings_invalid_dns;
}
linkProperties.addDns(dnsAddr);
if (mDns2View.length() > 0) {
dns = mDns2View.getText().toString();
try {
dnsAddr = NetworkUtils.numericToInetAddress(dns);
} catch (IllegalArgumentException e) {
return R.string.wifi_ip_settings_invalid_dns;
}
linkProperties.addDns(dnsAddr);
}
return 0;
}
int chosenNetworkSetupMethod() {
if (mNetworkSetupSpinner != null) {
return mNetworkSetupSpinner.getSelectedItemPosition();
}
return MANUAL;
}
WpsConfiguration getWpsConfig() {
WpsConfiguration config = new WpsConfiguration();
switch (mNetworkSetupSpinner.getSelectedItemPosition()) {
case WPS_PBC:
config.setup = Setup.PBC;
break;
case WPS_PIN_FROM_ACCESS_POINT:
config.setup = Setup.PIN_FROM_ACCESS_POINT;
break;
case WPS_PIN_FROM_DEVICE:
config.setup = Setup.PIN_FROM_DEVICE;
break;
default:
config.setup = Setup.INVALID;
Log.e(TAG, "WPS not selected type");
return config;
}
config.pin = ((TextView) mView.findViewById(R.id.wps_pin)).getText().toString();
config.BSSID = (mAccessPoint != null) ? mAccessPoint.bssid : null;
validateAndFetchIpAndProxyFields();
config.proxySettings = mProxySettings;
config.ipAssignment = mIpAssignment;
config.linkProperties = new LinkProperties(mLinkProperties);
return config;
}
private void showSecurityFields() {
if (mInXlSetupWizard) {
// Note: XL SetupWizard won't hide "EAP" settings here.
if (!((WifiSettingsForSetupWizardXL)mConfigUi.getContext()).initSecurityFields(mView,
mAccessPointSecurity)) {
return;
}
}
if (mAccessPointSecurity == AccessPoint.SECURITY_NONE) {
mView.findViewById(R.id.security_fields).setVisibility(View.GONE);
return;
}
mView.findViewById(R.id.security_fields).setVisibility(View.VISIBLE);
if (mPasswordView == null) {
mPasswordView = (TextView) mView.findViewById(R.id.password);
mPasswordView.addTextChangedListener(this);
((CheckBox) mView.findViewById(R.id.show_password)).setOnClickListener(this);
if (mAccessPoint != null && mAccessPoint.networkId != INVALID_NETWORK_ID) {
mPasswordView.setHint(R.string.wifi_unchanged);
}
}
if (mAccessPointSecurity != AccessPoint.SECURITY_EAP) {
mView.findViewById(R.id.eap).setVisibility(View.GONE);
return;
}
mView.findViewById(R.id.eap).setVisibility(View.VISIBLE);
if (mEapMethodSpinner == null) {
mEapMethodSpinner = (Spinner) mView.findViewById(R.id.method);
mPhase2Spinner = (Spinner) mView.findViewById(R.id.phase2);
mEapCaCertSpinner = (Spinner) mView.findViewById(R.id.ca_cert);
mEapUserCertSpinner = (Spinner) mView.findViewById(R.id.user_cert);
mEapIdentityView = (TextView) mView.findViewById(R.id.identity);
mEapAnonymousView = (TextView) mView.findViewById(R.id.anonymous);
loadCertificates(mEapCaCertSpinner, Credentials.CA_CERTIFICATE);
loadCertificates(mEapUserCertSpinner, Credentials.USER_PRIVATE_KEY);
if (mAccessPoint != null && mAccessPoint.networkId != INVALID_NETWORK_ID) {
WifiConfiguration config = mAccessPoint.getConfig();
setSelection(mEapMethodSpinner, config.eap.value());
setSelection(mPhase2Spinner, config.phase2.value());
setCertificate(mEapCaCertSpinner, Credentials.CA_CERTIFICATE,
config.ca_cert.value());
setCertificate(mEapUserCertSpinner, Credentials.USER_PRIVATE_KEY,
config.private_key.value());
mEapIdentityView.setText(config.identity.value());
mEapAnonymousView.setText(config.anonymous_identity.value());
}
}
}
private void showNetworkSetupFields() {
mView.findViewById(R.id.setup_fields).setVisibility(View.VISIBLE);
if (mNetworkSetupSpinner == null) {
mNetworkSetupSpinner = (Spinner) mView.findViewById(R.id.network_setup);
mNetworkSetupSpinner.setOnItemSelectedListener(this);
}
int pos = mNetworkSetupSpinner.getSelectedItemPosition();
/* Show pin text input if needed */
if (pos == WPS_PIN_FROM_ACCESS_POINT) {
mView.findViewById(R.id.wps_fields).setVisibility(View.VISIBLE);
} else {
mView.findViewById(R.id.wps_fields).setVisibility(View.GONE);
}
/* show/hide manual security fields appropriately */
if ((pos == WPS_PIN_FROM_ACCESS_POINT) || (pos == WPS_PIN_FROM_DEVICE)
|| (pos == WPS_PBC)) {
mView.findViewById(R.id.security_fields).setVisibility(View.GONE);
} else {
mView.findViewById(R.id.security_fields).setVisibility(View.VISIBLE);
}
}
private void showIpConfigFields() {
WifiConfiguration config = null;
mView.findViewById(R.id.ip_fields).setVisibility(View.VISIBLE);
if (mAccessPoint != null && mAccessPoint.networkId != INVALID_NETWORK_ID) {
config = mAccessPoint.getConfig();
}
if (mIpSettingsSpinner.getSelectedItemPosition() == STATIC_IP) {
mView.findViewById(R.id.staticip).setVisibility(View.VISIBLE);
if (mIpAddressView == null) {
mIpAddressView = (TextView) mView.findViewById(R.id.ipaddress);
mGatewayView = (TextView) mView.findViewById(R.id.gateway);
mNetworkPrefixLengthView = (TextView) mView.findViewById(
R.id.network_prefix_length);
mDns1View = (TextView) mView.findViewById(R.id.dns1);
mDns2View = (TextView) mView.findViewById(R.id.dns2);
}
if (config != null) {
LinkProperties linkProperties = config.linkProperties;
Iterator<LinkAddress> iterator = linkProperties.getLinkAddresses().iterator();
if (iterator.hasNext()) {
LinkAddress linkAddress = iterator.next();
mIpAddressView.setText(linkAddress.getAddress().getHostAddress());
mNetworkPrefixLengthView.setText(Integer.toString(linkAddress
.getNetworkPrefixLength()));
}
            Iterator<InetAddress> gateways = linkProperties.getGateways().iterator();
if (gateways.hasNext()) {
mGatewayView.setText(gateways.next().getHostAddress());
}
Iterator<InetAddress> dnsIterator = linkProperties.getDnses().iterator();
if (dnsIterator.hasNext()) {
mDns1View.setText(dnsIterator.next().getHostAddress());
}
if (dnsIterator.hasNext()) {
mDns2View.setText(dnsIterator.next().getHostAddress());
}
}
} else {
mView.findViewById(R.id.staticip).setVisibility(View.GONE);
}
}
private void showProxyFields() {
WifiConfiguration config = null;
mView.findViewById(R.id.proxy_settings_fields).setVisibility(View.VISIBLE);
if (mAccessPoint != null && mAccessPoint.networkId != INVALID_NETWORK_ID) {
config = mAccessPoint.getConfig();
}
if (mProxySettingsSpinner.getSelectedItemPosition() == PROXY_STATIC) {
mView.findViewById(R.id.proxy_warning_limited_support).setVisibility(View.VISIBLE);
mView.findViewById(R.id.proxy_fields).setVisibility(View.VISIBLE);
if (mProxyHostView == null) {
mProxyHostView = (TextView) mView.findViewById(R.id.proxy_hostname);
mProxyPortView = (TextView) mView.findViewById(R.id.proxy_port);
mProxyExclusionListView = (TextView) mView.findViewById(R.id.proxy_exclusionlist);
}
if (config != null) {
ProxyProperties proxyProperties = config.linkProperties.getHttpProxy();
if (proxyProperties != null) {
mProxyHostView.setText(proxyProperties.getHost());
mProxyPortView.setText(Integer.toString(proxyProperties.getPort()));
mProxyExclusionListView.setText(proxyProperties.getExclusionList());
}
}
} else {
mView.findViewById(R.id.proxy_warning_limited_support).setVisibility(View.GONE);
mView.findViewById(R.id.proxy_fields).setVisibility(View.GONE);
}
}
private void loadCertificates(Spinner spinner, String prefix) {
final Context context = mConfigUi.getContext();
final String unspecified = context.getString(R.string.wifi_unspecified);
String[] certs = KeyStore.getInstance().saw(prefix);
if (certs == null || certs.length == 0) {
certs = new String[] {unspecified};
} else {
final String[] array = new String[certs.length + 1];
array[0] = unspecified;
System.arraycopy(certs, 0, array, 1, certs.length);
certs = array;
}
final ArrayAdapter<String> adapter = new ArrayAdapter<String>(
context, android.R.layout.simple_spinner_item, certs);
adapter.setDropDownViewResource(android.R.layout.simple_spinner_dropdown_item);
spinner.setAdapter(adapter);
}
private void setCertificate(Spinner spinner, String prefix, String cert) {
prefix = KEYSTORE_SPACE + prefix;
if (cert != null && cert.startsWith(prefix)) {
setSelection(spinner, cert.substring(prefix.length()));
}
}
private void setSelection(Spinner spinner, String value) {
if (value != null) {
ArrayAdapter<String> adapter = (ArrayAdapter<String>) spinner.getAdapter();
for (int i = adapter.getCount() - 1; i >= 0; --i) {
if (value.equals(adapter.getItem(i))) {
spinner.setSelection(i);
break;
}
}
}
}
public boolean isEdit() {
return mEdit;
}
@Override
public void afterTextChanged(Editable s) {
enableSubmitIfAppropriate();
}
@Override
public void beforeTextChanged(CharSequence s, int start, int count, int after) {
}
@Override
public void onTextChanged(CharSequence s, int start, int before, int count) {
}
@Override
public void onClick(View view) {
mPasswordView.setInputType(
InputType.TYPE_CLASS_TEXT | (((CheckBox) view).isChecked() ?
InputType.TYPE_TEXT_VARIATION_VISIBLE_PASSWORD :
InputType.TYPE_TEXT_VARIATION_PASSWORD));
}
@Override
public void onItemSelected(AdapterView<?> parent, View view, int position, long id) {
if (parent == mSecuritySpinner) {
mAccessPointSecurity = position;
showSecurityFields();
enableSubmitIfAppropriate();
} else if (parent == mNetworkSetupSpinner) {
showNetworkSetupFields();
} else if (parent == mProxySettingsSpinner) {
showProxyFields();
} else {
showIpConfigFields();
}
}
@Override
public void onNothingSelected(AdapterView<?> parent) {
}
}
| false | true | public WifiConfigController(
WifiConfigUiBase parent, View view, AccessPoint accessPoint, boolean edit) {
mConfigUi = parent;
mInXlSetupWizard = (parent instanceof WifiConfigUiForSetupWizardXL);
mView = view;
mAccessPoint = accessPoint;
mAccessPointSecurity = (accessPoint == null) ? AccessPoint.SECURITY_NONE :
accessPoint.security;
mEdit = edit;
final Context context = mConfigUi.getContext();
final Resources resources = context.getResources();
if (mAccessPoint == null) { // new network
mConfigUi.setTitle(R.string.wifi_add_network);
mSsidView = (TextView) mView.findViewById(R.id.ssid);
mSsidView.addTextChangedListener(this);
mSecuritySpinner = ((Spinner) mView.findViewById(R.id.security));
mSecuritySpinner.setOnItemSelectedListener(this);
if (mInXlSetupWizard) {
mView.findViewById(R.id.type_ssid).setVisibility(View.VISIBLE);
mView.findViewById(R.id.type_security).setVisibility(View.VISIBLE);
                // We want a custom layout. The content must be the same as in the other cases.
mSecuritySpinner.setAdapter(
new ArrayAdapter<String>(context, R.layout.wifi_setup_custom_list_item_1,
android.R.id.text1,
context.getResources().getStringArray(R.array.wifi_security)));
} else {
mView.findViewById(R.id.type).setVisibility(View.VISIBLE);
}
mConfigUi.setSubmitButton(context.getString(R.string.wifi_save));
} else {
mConfigUi.setTitle(mAccessPoint.ssid);
mIpSettingsSpinner = (Spinner) mView.findViewById(R.id.ip_settings);
mIpSettingsSpinner.setOnItemSelectedListener(this);
mProxySettingsSpinner = (Spinner) mView.findViewById(R.id.proxy_settings);
mProxySettingsSpinner.setOnItemSelectedListener(this);
ViewGroup group = (ViewGroup) mView.findViewById(R.id.info);
DetailedState state = mAccessPoint.getState();
if (state != null) {
addRow(group, R.string.wifi_status, Summary.get(mConfigUi.getContext(), state));
}
String[] type = resources.getStringArray(R.array.wifi_security);
addRow(group, R.string.wifi_security, type[mAccessPoint.security]);
int level = mAccessPoint.getLevel();
if (level != -1) {
String[] signal = resources.getStringArray(R.array.wifi_signal);
addRow(group, R.string.wifi_signal, signal[level]);
}
WifiInfo info = mAccessPoint.getInfo();
if (info != null) {
addRow(group, R.string.wifi_speed, info.getLinkSpeed() + WifiInfo.LINK_SPEED_UNITS);
// TODO: fix the ip address for IPv6.
int address = info.getIpAddress();
if (address != 0) {
addRow(group, R.string.wifi_ip_address, Formatter.formatIpAddress(address));
}
}
if (mAccessPoint.networkId != INVALID_NETWORK_ID) {
WifiConfiguration config = mAccessPoint.getConfig();
if (config.ipAssignment == IpAssignment.STATIC) {
mIpSettingsSpinner.setSelection(STATIC_IP);
} else {
mIpSettingsSpinner.setSelection(DHCP);
}
if (config.proxySettings == ProxySettings.STATIC) {
mProxySettingsSpinner.setSelection(PROXY_STATIC);
} else {
mProxySettingsSpinner.setSelection(PROXY_NONE);
}
}
/* Show network setup options only for a new network */
if (mAccessPoint.networkId == INVALID_NETWORK_ID && mAccessPoint.wpsAvailable) {
showNetworkSetupFields();
}
if (mAccessPoint.networkId == INVALID_NETWORK_ID || mEdit) {
showSecurityFields();
showIpConfigFields();
showProxyFields();
}
if (mEdit) {
mConfigUi.setSubmitButton(context.getString(R.string.wifi_save));
} else {
if (state == null && level != -1) {
mConfigUi.setSubmitButton(context.getString(R.string.wifi_connect));
} else {
mView.findViewById(R.id.ip_fields).setVisibility(View.GONE);
}
if (mAccessPoint.networkId != INVALID_NETWORK_ID) {
mConfigUi.setForgetButton(context.getString(R.string.wifi_forget));
}
}
}
mConfigUi.setCancelButton(context.getString(R.string.wifi_cancel));
if (mConfigUi.getSubmitButton() != null) {
enableSubmitIfAppropriate();
}
}
| public WifiConfigController(
WifiConfigUiBase parent, View view, AccessPoint accessPoint, boolean edit) {
mConfigUi = parent;
mInXlSetupWizard = (parent instanceof WifiConfigUiForSetupWizardXL);
mView = view;
mAccessPoint = accessPoint;
mAccessPointSecurity = (accessPoint == null) ? AccessPoint.SECURITY_NONE :
accessPoint.security;
mEdit = edit;
final Context context = mConfigUi.getContext();
final Resources resources = context.getResources();
if (mAccessPoint == null) { // new network
mConfigUi.setTitle(R.string.wifi_add_network);
mSsidView = (TextView) mView.findViewById(R.id.ssid);
mSsidView.addTextChangedListener(this);
mSecuritySpinner = ((Spinner) mView.findViewById(R.id.security));
mSecuritySpinner.setOnItemSelectedListener(this);
if (mInXlSetupWizard) {
mView.findViewById(R.id.type_ssid).setVisibility(View.VISIBLE);
mView.findViewById(R.id.type_security).setVisibility(View.VISIBLE);
                // We want a custom layout. The content must be the same as in the other cases.
mSecuritySpinner.setAdapter(
new ArrayAdapter<String>(context, R.layout.wifi_setup_custom_list_item_1,
android.R.id.text1,
context.getResources().getStringArray(R.array.wifi_security)));
} else {
mView.findViewById(R.id.type).setVisibility(View.VISIBLE);
}
mConfigUi.setSubmitButton(context.getString(R.string.wifi_save));
} else {
mConfigUi.setTitle(mAccessPoint.ssid);
mIpSettingsSpinner = (Spinner) mView.findViewById(R.id.ip_settings);
mIpSettingsSpinner.setOnItemSelectedListener(this);
mProxySettingsSpinner = (Spinner) mView.findViewById(R.id.proxy_settings);
mProxySettingsSpinner.setOnItemSelectedListener(this);
ViewGroup group = (ViewGroup) mView.findViewById(R.id.info);
DetailedState state = mAccessPoint.getState();
if (state != null) {
addRow(group, R.string.wifi_status, Summary.get(mConfigUi.getContext(), state));
}
String[] type = resources.getStringArray(R.array.wifi_security);
addRow(group, R.string.wifi_security, type[mAccessPoint.security]);
int level = mAccessPoint.getLevel();
if (level != -1) {
String[] signal = resources.getStringArray(R.array.wifi_signal);
addRow(group, R.string.wifi_signal, signal[level]);
}
WifiInfo info = mAccessPoint.getInfo();
if (info != null) {
addRow(group, R.string.wifi_speed, info.getLinkSpeed() + WifiInfo.LINK_SPEED_UNITS);
}
if (mAccessPoint.networkId != INVALID_NETWORK_ID) {
WifiConfiguration config = mAccessPoint.getConfig();
if (config.ipAssignment == IpAssignment.STATIC) {
mIpSettingsSpinner.setSelection(STATIC_IP);
} else {
mIpSettingsSpinner.setSelection(DHCP);
//Display IP addresses
for(InetAddress a : config.linkProperties.getAddresses()) {
addRow(group, R.string.wifi_ip_address, a.getHostAddress());
}
}
if (config.proxySettings == ProxySettings.STATIC) {
mProxySettingsSpinner.setSelection(PROXY_STATIC);
} else {
mProxySettingsSpinner.setSelection(PROXY_NONE);
}
}
/* Show network setup options only for a new network */
if (mAccessPoint.networkId == INVALID_NETWORK_ID && mAccessPoint.wpsAvailable) {
showNetworkSetupFields();
}
if (mAccessPoint.networkId == INVALID_NETWORK_ID || mEdit) {
showSecurityFields();
showIpConfigFields();
showProxyFields();
}
if (mEdit) {
mConfigUi.setSubmitButton(context.getString(R.string.wifi_save));
} else {
if (state == null && level != -1) {
mConfigUi.setSubmitButton(context.getString(R.string.wifi_connect));
} else {
mView.findViewById(R.id.ip_fields).setVisibility(View.GONE);
}
if (mAccessPoint.networkId != INVALID_NETWORK_ID) {
mConfigUi.setForgetButton(context.getString(R.string.wifi_forget));
}
}
}
mConfigUi.setCancelButton(context.getString(R.string.wifi_cancel));
if (mConfigUi.getSubmitButton() != null) {
enableSubmitIfAppropriate();
}
}
|
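The fix in this pair replaces an IPv4-only status row with per-address rows. WifiInfo.getIpAddress() packs the address into an int (hence the removed "TODO: fix the ip address for IPv6" comment), so the corrected constructor drops that row and instead iterates config.linkProperties.getAddresses() in the DHCP branch, adding one row per configured address, IPv6 included. A minimal, self-contained sketch of why the list-based approach matters; the two literals come from the IETF documentation address ranges and are purely illustrative:
import java.net.InetAddress;
import java.net.UnknownHostException;
import java.util.Arrays;
import java.util.List;
// An int can only represent an IPv4 address; iterating InetAddress objects
// handles IPv4 and IPv6 uniformly, mirroring the fixed loop's addRow calls.
public class AddressRowSketch {
    public static void main(String[] args) throws UnknownHostException {
        List<InetAddress> addresses = Arrays.asList(
                InetAddress.getByName("192.0.2.10"),   // IPv4 literal; no DNS lookup occurs
                InetAddress.getByName("2001:db8::1")); // IPv6 literal
        for (InetAddress a : addresses) {
            System.out.println("IP address: " + a.getHostAddress());
        }
    }
}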
diff --git a/src/java/no/schibstedsok/front/searchportal/configuration/SearchModeFactory.java b/src/java/no/schibstedsok/front/searchportal/configuration/SearchModeFactory.java
index fb51cf3a3..8866e8dd3 100644
--- a/src/java/no/schibstedsok/front/searchportal/configuration/SearchModeFactory.java
+++ b/src/java/no/schibstedsok/front/searchportal/configuration/SearchModeFactory.java
@@ -1,621 +1,622 @@
// Copyright (2006) Schibsted Søk AS
package no.schibstedsok.front.searchportal.configuration;
import java.lang.reflect.Constructor;
import java.lang.reflect.InvocationTargetException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.locks.ReentrantReadWriteLock;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;
import no.schibstedsok.common.ioc.BaseContext;
import no.schibstedsok.common.ioc.ContextWrapper;
import no.schibstedsok.front.searchportal.InfrastructureException;
import no.schibstedsok.front.searchportal.configuration.loader.DocumentContext;
import no.schibstedsok.front.searchportal.configuration.loader.DocumentLoader;
import no.schibstedsok.front.searchportal.executor.ParallelSearchCommandExecutor;
import no.schibstedsok.front.searchportal.executor.SearchCommandExecutor;
import no.schibstedsok.front.searchportal.executor.SequentialSearchCommandExecutor;
import no.schibstedsok.front.searchportal.output.TextOutputResultHandler;
import no.schibstedsok.front.searchportal.output.VelocityResultHandler;
import no.schibstedsok.front.searchportal.output.XmlOutputResultHandler;
import no.schibstedsok.front.searchportal.query.transform.ExactTitleMatchTransformer;
import no.schibstedsok.front.searchportal.query.transform.InfopageQueryTransformer;
import no.schibstedsok.front.searchportal.query.transform.NewsTransformer;
import no.schibstedsok.front.searchportal.query.transform.PrefixRemoverTransformer;
import no.schibstedsok.front.searchportal.query.transform.QueryTransformer;
import no.schibstedsok.front.searchportal.query.transform.SimpleSiteSearchTransformer;
import no.schibstedsok.front.searchportal.query.transform.SynonymQueryTransformer;
import no.schibstedsok.front.searchportal.query.transform.TermPrefixTransformer;
import no.schibstedsok.front.searchportal.query.transform.TvQueryTransformer;
import no.schibstedsok.front.searchportal.result.handler.AddDocCountModifier;
import no.schibstedsok.front.searchportal.result.handler.AgeCalculatorResultHandler;
import no.schibstedsok.front.searchportal.result.handler.CategorySplitter;
import no.schibstedsok.front.searchportal.result.handler.ContentSourceCollector;
import no.schibstedsok.front.searchportal.result.handler.DiscardOldNewsResultHandler;
import no.schibstedsok.front.searchportal.result.handler.FieldChooser;
import no.schibstedsok.front.searchportal.result.handler.FindFileFormat;
import no.schibstedsok.front.searchportal.result.handler.ImageHelper;
import no.schibstedsok.front.searchportal.result.handler.MultiValuedFieldCollector;
import no.schibstedsok.front.searchportal.result.handler.PhoneNumberChooser;
import no.schibstedsok.front.searchportal.result.handler.PhoneNumberFormatter;
import no.schibstedsok.front.searchportal.result.handler.ResultHandler;
import no.schibstedsok.front.searchportal.result.handler.SpellingSuggestionChooser;
import no.schibstedsok.front.searchportal.result.handler.SumFastModifiers;
import no.schibstedsok.front.searchportal.result.handler.TvEnrichmentDateFormatHandler;
import no.schibstedsok.front.searchportal.result.handler.WeatherCelciusHandler;
import no.schibstedsok.front.searchportal.result.handler.WeatherDateHandler;
import no.schibstedsok.front.searchportal.site.Site;
import no.schibstedsok.front.searchportal.site.SiteContext;
import no.schibstedsok.front.searchportal.util.SearchConstants;
import no.schibstedsok.front.searchportal.util.config.AbstractDocumentFactory;
import org.apache.log4j.Logger;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
/**
 * @author <a href="mailto:[email protected]">mick</a>
* @version <tt>$Revision: 2356 $</tt>
*/
public final class SearchModeFactory extends AbstractDocumentFactory{
/**
* The context any SearchModeFactory must work against. *
*/
public interface Context extends BaseContext, DocumentContext, SiteContext {}
// Constants -----------------------------------------------------
private static final Map<Site, SearchModeFactory> INSTANCES = new HashMap<Site,SearchModeFactory>();
private static final ReentrantReadWriteLock INSTANCES_LOCK = new ReentrantReadWriteLock();
private static final Map<SearchMode,Map<String,SearchConfiguration>> COMMANDS
= new HashMap<SearchMode,Map<String,SearchConfiguration>>();
private static final ReentrantReadWriteLock COMMANDS_LOCK = new ReentrantReadWriteLock();
private static final Logger LOG = Logger.getLogger(SearchModeFactory.class);
private static final String ERR_DOC_BUILDER_CREATION
= "Failed to DocumentBuilderFactory.newInstance().newDocumentBuilder()";
private static final String ERR_MISSING_IMPLEMENTATION = "Missing implementation case in CommandTypes";
private static final String ERR_ONLY_ONE_CHILD_NAVIGATOR_ALLOWED
= "Each FastNavigator is only allowed to have one child. Parent was ";
private static final String INFO_PARSING_MODE = "Parsing mode ";
private static final String INFO_PARSING_CONFIGURATION = " Parsing configuration ";
private static final String INFO_PARSING_NAVIGATOR = " Parsing navigator ";
private static final String INFO_PARSING_RESULT_HANDLER = " Parsing result handler ";
private static final String INFO_PARSING_QUERY_TRANSFORMER = " Parsing query transformer ";
// Attributes ----------------------------------------------------
private final Map<String,SearchMode> modes = new HashMap<String,SearchMode>();
private final ReentrantReadWriteLock modesLock = new ReentrantReadWriteLock();
private final DocumentLoader loader;
private final Context context;
private String templatePrefix;
// Static --------------------------------------------------------
public static SearchModeFactory getModeFactory(final Context cxt) {
final Site site = cxt.getSite();
INSTANCES_LOCK.readLock().lock();
SearchModeFactory instance = INSTANCES.get(site);
INSTANCES_LOCK.readLock().unlock();
if (instance == null) {
try {
instance = new SearchModeFactory(cxt);
} catch (ParserConfigurationException ex) {
LOG.error(ERR_DOC_BUILDER_CREATION,ex);
}
}
return instance;
}
// Constructors --------------------------------------------------
/** Creates a new instance of ModeFactoryImpl */
private SearchModeFactory(final Context cxt)
throws ParserConfigurationException {
LOG.trace("ModeFactory(cxt)");
context = cxt;
// configuration files
final DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
factory.setValidating(false);
final DocumentBuilder builder = factory.newDocumentBuilder();
loader = context.newDocumentLoader(SearchConstants.MODES_XMLFILE, builder);
// update the store of factories
INSTANCES_LOCK.writeLock().lock();
INSTANCES.put(context.getSite(), this);
INSTANCES_LOCK.writeLock().unlock();
// start initialisation
init();
}
// Public --------------------------------------------------------
public SearchMode getMode(final String id){
LOG.trace("getMode(" + id + ")");
SearchMode mode = getModeImpl(id);
if(mode == null && id != null && id.length() >0 && context.getSite().getParent() != null){
// not found in this site's modes.xml. look in parent's site.
final SearchModeFactory factory = getModeFactory( ContextWrapper.wrap(
Context.class,
new BaseContext(){
public Site getSite(){
return context.getSite().getParent();
}
},
context
));
mode = factory.getMode(id);
}
return mode;
}
// Package protected ---------------------------------------------
// Protected -----------------------------------------------------
// Private -------------------------------------------------------
private void init(){
loader.abut();
LOG.debug("Parsing " + SearchConstants.MODES_XMLFILE + " started");
final Document doc = loader.getDocument();
final Element root = doc.getDocumentElement();
templatePrefix = root.getAttribute("template-prefix");
// loop through modes.
final NodeList modeList = root.getElementsByTagName("mode");
for (int i = 0; i < modeList.getLength(); ++i) {
final Element modeE = (Element) modeList.item(i);
final String id = modeE.getAttribute("id");
LOG.info(INFO_PARSING_MODE + modeE.getLocalName() + " " + id);
final SearchMode inherit = getMode( modeE.getAttribute("inherit") );
final SearchMode mode = new SearchMode(inherit);
mode.setId(id);
mode.setExecutor( parseExecutor( modeE.getAttribute("executor"), new SequentialSearchCommandExecutor() ) );
mode.setQueryAnalysisEnabled(parseBoolean( modeE.getAttribute("analysis"),
inherit != null ? inherit.isQueryAnalysisEnabled() : false));
// setup new commands list for this mode
final Map<String,SearchConfiguration> modesCommands = new HashMap<String,SearchConfiguration>();
COMMANDS_LOCK.writeLock().lock();
COMMANDS.put(mode, modesCommands);
COMMANDS_LOCK.writeLock().unlock();
// now loop through commands
for(CommandTypes commandType : CommandTypes.values()){
final NodeList commandsList = modeE.getElementsByTagName(commandType.getXmlName());
for (int j = 0; j < commandsList.getLength(); ++j) {
final Element commandE = (Element) commandsList.item(j);
final SearchConfiguration sc = commandType.parseSearchConfiguration(commandE, mode);
modesCommands.put(sc.getName(), sc);
mode.addSearchConfiguration(sc);
}
}
// add mode
modesLock.writeLock().lock();
modes.put(id, mode);
modesLock.writeLock().unlock();
}
// finished
LOG.debug("Parsing " + SearchConstants.MODES_XMLFILE + " finished");
}
private static SearchCommandExecutor parseExecutor(final String name, final SearchCommandExecutor def){
if( "parallel".equalsIgnoreCase(name) ){
return new ParallelSearchCommandExecutor();
}else if("sequential".equalsIgnoreCase(name)){
return new SequentialSearchCommandExecutor();
}
return def;
}
private SearchMode getModeImpl(final String id){
try{
modesLock.readLock().lock();
return modes.get(id);
}finally{
modesLock.readLock().unlock();
}
}
// Inner classes -------------------------------------------------
private enum CommandTypes {
COMMAND (AbstractSearchConfiguration.class),
FAST_COMMAND (FastConfiguration.class),
MATH_COMMAND (MathExpressionConfiguration.class),
NEWS_COMMAND (NewsSearchConfiguration.class),
OVERTURE_PPC_COMMAND(OverturePPCConfiguration.class),
PICTURE_COMMAND(PicSearchConfiguration.class),
SENSIS_COMMAND(SensisSearchConfiguration.class),
STOCK_COMMAND(StockSearchConfiguration.class),
WEB_COMMAND(WebSearchConfiguration.class),
WHITEPAGES_COMMAND(WhiteSearchConfiguration.class),
YELLOWPAGES_COMMAND(YellowSearchConfiguration.class);
private final Class<? extends SearchConfiguration> clazz;
private final String xmlName;
CommandTypes(final Class<? extends SearchConfiguration> clazz){
this.clazz = clazz;
xmlName = name().replaceAll("_","-").toLowerCase();
}
public String getXmlName(){
return xmlName;
}
public SearchConfiguration parseSearchConfiguration(
final Element commandE,
final SearchMode mode){
final SearchConfiguration inherit = findParent(commandE.getAttribute("inherit"), mode);
final String id = commandE.getAttribute("id");
LOG.info(INFO_PARSING_CONFIGURATION + commandE.getLocalName() + " " + id);
try {
final Constructor<? extends SearchConfiguration> con;
con = clazz.getConstructor(SearchConfiguration.class);
final SearchConfiguration sc;
sc = con.newInstance(inherit);
sc.setResultsToReturn(parseInt(commandE.getAttribute("results-to-return"),
inherit != null ? inherit.getResultsToReturn() : -1));
if( sc instanceof AbstractSearchConfiguration ){
// everything extends AbstractSearchConfiguration
final AbstractSearchConfiguration asc = (AbstractSearchConfiguration) sc;
final AbstractSearchConfiguration ascInherit = inherit instanceof AbstractSearchConfiguration
? (AbstractSearchConfiguration)inherit
: null;
asc.setName(id);
asc.setAlwaysRunEnabled( parseBoolean(commandE.getAttribute("always-run"),
ascInherit != null ? ascInherit.isAlwaysRunEnabled() : false) );
asc.setPagingEnabled( parseBoolean(commandE.getAttribute("paging"),
ascInherit != null ? ascInherit.isPagingEnabled() : false) );
asc.setUseParameterAsQuery( commandE.getAttribute("query-parameter") );
if( commandE.getAttribute("result-fields").length() >0 ){
final String[] resultFields = commandE.getAttribute("result-fields").split(",");
for( String resultField : resultFields ){
asc.addResultField(resultField);
}
}
asc.setStatisticsName(parseString(commandE.getAttribute("statistical-name"),
ascInherit != null ? ascInherit.getStatisticsName() : ""));
}
if( sc instanceof FastConfiguration ){
final FastConfiguration fsc = (FastConfiguration) sc;
final FastConfiguration fscInherit = inherit instanceof FastConfiguration
? (FastConfiguration)inherit
: null;
fsc.setClusteringEnabled(parseBoolean(commandE.getAttribute("clustering"),
fscInherit != null ? fscInherit.isClusteringEnabled() : false));
fsc.setCollapsingEnabled(parseBoolean(commandE.getAttribute("collapsing"),
fscInherit != null ? fscInherit.isCollapsingEnabled() : false));
//fsc.setCollectionFilterString(commandE.getAttribute("collection-filter-string")); // FIXME !!
if( commandE.getAttribute("collections").length() >0 ){
final String[] collections = commandE.getAttribute("collections").split(",");
for(String collection : collections){
fsc.addCollection(collection);
}
}
fsc.setFilter(parseString(commandE.getAttribute("filter"),
fscInherit != null ? fscInherit.getFilter() : ""));
fsc.setIgnoreNavigationEnabled(parseBoolean(commandE.getAttribute("ignore-navigation"),
fscInherit != null ? fscInherit.isIgnoreNavigationEnabled() : false));
fsc.setOffensiveScoreLimit(parseInt(commandE.getAttribute("offensive-score-limit"),
fscInherit != null ? fscInherit.getOffensiveScoreLimit() : -1));
fsc.setQtPipeline(parseString(commandE.getAttribute("qt-pipeline"),
fscInherit != null ? fscInherit.getQtPipeline() : ""));
fsc.setQueryServerURL(parseString(commandE.getAttribute("query-server-url"),
fscInherit != null ? fscInherit.getQueryServerURL() : null));
fsc.setRelevantQueriesEnabled(parseBoolean(commandE.getAttribute("relevant-queries"),
fscInherit != null ? fscInherit.isRelevantQueriesEnabled() : false));
fsc.setSortBy(parseString(commandE.getAttribute("sort-by"),
fscInherit != null ? fscInherit.getSortBy() : ""));
fsc.setSpamScoreLimit( parseInt(commandE.getAttribute("spam-score-limit"),
fscInherit != null ? fscInherit.getSpamScoreLimit() : -1));
fsc.setSpellcheckEnabled( parseBoolean(commandE.getAttribute("spellcheck"),
fscInherit != null ? fscInherit.isSpellcheckEnabled() : false));
//fsc.setSynonymEnabled(Boolean.parseBoolean(commandE.getAttribute("synonyms"))); // FIXME !!
// navigators
final NodeList nList = commandE.getElementsByTagName("navigators");
for( int i = 0; i < nList.getLength(); ++i){
final Collection<FastNavigator> navigators = parseNavigators( (Element)nList.item(i));
for(FastNavigator navigator : navigators ){
fsc.addNavigator(navigator, navigator.getId());
}
}
}
if( sc instanceof MathExpressionConfiguration ){
final MathExpressionConfiguration msc = (MathExpressionConfiguration) sc;
}
if( sc instanceof NewsSearchConfiguration ){
final NewsSearchConfiguration nsc = (NewsSearchConfiguration) sc;
}
if( sc instanceof OverturePPCConfiguration ){
final OverturePPCConfiguration osc = (OverturePPCConfiguration) sc;
final OverturePPCConfiguration oscInherit = inherit instanceof OverturePPCConfiguration
? (OverturePPCConfiguration)inherit
: null;
osc.setPartnerId(parseString(commandE.getAttribute("partner-id"),
oscInherit != null ? oscInherit.getPartnerId() : ""));
}
if( sc instanceof PicSearchConfiguration ){
final PicSearchConfiguration psc = (PicSearchConfiguration) sc;
}
if( sc instanceof SensisSearchConfiguration ){
final SensisSearchConfiguration ssc = (SensisSearchConfiguration) sc;
}
if( sc instanceof StockSearchConfiguration ){
final StockSearchConfiguration ssc = (StockSearchConfiguration) sc;
}
if( sc instanceof WebSearchConfiguration ){
final WebSearchConfiguration wsc = (WebSearchConfiguration) sc;
}
if( sc instanceof WhiteSearchConfiguration ){
final WhiteSearchConfiguration wsc = (WhiteSearchConfiguration) sc;
}
if( sc instanceof YellowSearchConfiguration ){
final YellowSearchConfiguration ysc = (YellowSearchConfiguration) sc;
}
// query transformers
final NodeList qtNodeList = commandE.getElementsByTagName("query-transformers");
+ final Element qtRootElement = (Element) qtNodeList.item(0);
for( QueryTransformerTypes qtType : QueryTransformerTypes.values()){
- final NodeList qtList = commandE.getElementsByTagName(qtType.getXmlName());
+ final NodeList qtList = qtRootElement.getElementsByTagName(qtType.getXmlName());
for( int i = 0 ; i < qtList.getLength(); ++i ){
final Element qt = (Element) qtList.item(i);
sc.addQueryTransformer(qtType.parseQueryTransformer(qt));
}
}
// result handlers
final NodeList rhNodeList = commandE.getElementsByTagName("result-handler");
- final List<ResultHandler> handlers = new ArrayList<ResultHandler>();
+ final Element rhRootElement = (Element) rhNodeList.item(0);
for( ResultHandlerTypes rhType : ResultHandlerTypes.values()){
- final NodeList rhList = commandE.getElementsByTagName(rhType.getXmlName());
+ final NodeList rhList = rhRootElement.getElementsByTagName(rhType.getXmlName());
for( int i = 0 ; i < rhList.getLength(); ++i ){
final Element rh = (Element) rhList.item(i);
sc.addResultHandler(rhType.parseResultHandler(rh));
}
}
return sc;
} catch (InstantiationException ex) {
throw new InfrastructureException(ex);
} catch (IllegalAccessException ex) {
throw new InfrastructureException(ex);
} catch (SecurityException ex) {
throw new InfrastructureException(ex);
} catch (NoSuchMethodException ex) {
throw new InfrastructureException(ex);
} catch (IllegalArgumentException ex) {
throw new InfrastructureException(ex);
} catch (InvocationTargetException ex) {
throw new InfrastructureException(ex);
}
}
private SearchConfiguration findParent(
final String id,
final SearchMode mode){
SearchMode m = mode;
SearchConfiguration config = null;
do{
COMMANDS_LOCK.readLock().lock();
final Map<String,SearchConfiguration> configs = COMMANDS.get(m);
COMMANDS_LOCK.readLock().unlock();
config = configs.get(id);
m = m.getParentSearchMode();
}while( config == null && m != null );
return config;
}
private Collection<FastNavigator> parseNavigators(final Element navsE){
final Collection<FastNavigator> navigators = new ArrayList<FastNavigator>();
final NodeList children = navsE.getChildNodes();
for( int i = 0; i < children.getLength(); ++i){
final Node child = children.item(i);
if( child instanceof Element && "navigator".equals(((Element)child).getLocalName())){
final Element navE = (Element)child;
final String id = navE.getAttribute("id");
LOG.info(INFO_PARSING_NAVIGATOR + id);
final FastNavigator nav = new FastNavigator(
navE.getAttribute("name"),
navE.getAttribute("field"),
navE.getAttribute("display-name"));
nav.setId(id);
final Collection<FastNavigator> childNavigators = parseNavigators(navE);
if( childNavigators.size() > 1 ){
throw new IllegalStateException(ERR_ONLY_ONE_CHILD_NAVIGATOR_ALLOWED + id);
}else if( childNavigators.size() == 1 ){
nav.setChildNavigator(childNavigators.iterator().next());
}
navigators.add(nav);
}
}
return navigators;
}
}
private enum QueryTransformerTypes {
EXACT_TITLE_MATCH (ExactTitleMatchTransformer.class),
INFOPAGE (InfopageQueryTransformer.class),
NEWS (NewsTransformer.class),
PREFIX_REMOVER (PrefixRemoverTransformer.class),
SIMPLE_SITE_SEARCH (SimpleSiteSearchTransformer.class),
SYNONYM (SynonymQueryTransformer.class),
TERM_PREFIX (TermPrefixTransformer.class),
TV (TvQueryTransformer.class);
private final Class<? extends QueryTransformer> clazz;
private final String xmlName;
QueryTransformerTypes(final Class<? extends QueryTransformer> c){
clazz = c;
xmlName = name().replaceAll("_","-").toLowerCase();
}
public String getXmlName(){
return xmlName;
}
public QueryTransformer parseQueryTransformer(final Element qt){
try {
LOG.info(INFO_PARSING_QUERY_TRANSFORMER + xmlName);
final QueryTransformer transformer = clazz.newInstance();
switch(this){
case PREFIX_REMOVER:
final PrefixRemoverTransformer prqt = (PrefixRemoverTransformer) transformer;
prqt.addPrefixes( qt.getAttribute("prefixes").split(",") );
break;
case SIMPLE_SITE_SEARCH:
final SimpleSiteSearchTransformer ssqt = (SimpleSiteSearchTransformer) transformer;
ssqt.setParameterName( qt.getAttribute("parameter"));
break;
case TERM_PREFIX:
final TermPrefixTransformer tpqt = (TermPrefixTransformer) transformer;
tpqt.setPrefix( qt.getAttribute("prefix"));
tpqt.setNumberPrefix( qt.getAttribute("number-prefix"));
break;
}
return transformer;
} catch (InstantiationException ex) {
throw new InfrastructureException(ex);
} catch (IllegalAccessException ex) {
throw new InfrastructureException(ex);
}
}
}
private enum ResultHandlerTypes {
ADD_DOC_COUNT (AddDocCountModifier.class),
AGE_CALCULATOR (AgeCalculatorResultHandler.class),
CATEGORY_SPLITTER (CategorySplitter.class),
CONTENT_SOURCE_COLLECTOR (ContentSourceCollector.class),
DISCARD_OLD_NEWS (DiscardOldNewsResultHandler.class),
FIELD_CHOOSER (FieldChooser.class),
FIND_FILE_FORMAT (FindFileFormat.class),
IMAGE_HELPER (ImageHelper.class),
MULTIVALUED_FIELD_COLLECTOR (MultiValuedFieldCollector.class),
PHONE_NUMBER_CHOOSER (PhoneNumberChooser.class),
PHONE_NUMBER_FORMATTER (PhoneNumberFormatter.class),
SPELLING_SUGGESTION_CHOOSER (SpellingSuggestionChooser.class),
SUM (SumFastModifiers.class),
TV (TvEnrichmentDateFormatHandler.class),
WEATHER_CELCIUS (WeatherCelciusHandler.class),
WEATHER_DATE (WeatherDateHandler.class),
TEXT_OUTPUT (TextOutputResultHandler.class),
VELOCITY_OUTPUT (VelocityResultHandler.class),
XML_OUTPUT (XmlOutputResultHandler.class);
private final Class<? extends ResultHandler> clazz;
private final String xmlName;
ResultHandlerTypes(final Class<? extends ResultHandler> c){
clazz = c;
xmlName = name().replaceAll("_","-").toLowerCase();
}
public String getXmlName(){
return xmlName;
}
public ResultHandler parseResultHandler(final Element rh){
try {
LOG.info(INFO_PARSING_RESULT_HANDLER + xmlName);
final ResultHandler handler = clazz.newInstance();
switch(this){
case ADD_DOC_COUNT:
final AddDocCountModifier adc = (AddDocCountModifier) handler;
adc.setModifierName(rh.getAttribute("modifier"));
break;
case AGE_CALCULATOR:
final AgeCalculatorResultHandler ac = (AgeCalculatorResultHandler) handler;
ac.setTargetField(rh.getAttribute("target"));
ac.setSourceField(rh.getAttribute("source"));
break;
case FIELD_CHOOSER:
final FieldChooser fc = (FieldChooser) handler;
fc.setTargetField(rh.getAttribute("target"));
final String[] fields = rh.getAttribute("fields").split(",");
for( String field : fields ){
fc.addField(field);
}
break;
case SPELLING_SUGGESTION_CHOOSER:
final SpellingSuggestionChooser ssc = (SpellingSuggestionChooser) handler;
ssc.setMinScore(parseInt(rh.getAttribute("min-score"), -1));
ssc.setMaxSuggestions(parseInt(rh.getAttribute("max-suggestions"), -1));
ssc.setMaxDistance(parseInt(rh.getAttribute("max-distance"), -1));
ssc.setMuchBetter(parseInt(rh.getAttribute("much-better"), -1));
ssc.setLongQuery(parseInt(rh.getAttribute("long-query"), -1));
ssc.setVeryLongQuery(parseInt(rh.getAttribute("very-long-query"), -1));
ssc.setLongQueryMaxSuggestions(parseInt(rh.getAttribute("long-query-max-suggestions"), -1));
break;
case SUM:
final SumFastModifiers sfm = (SumFastModifiers) handler;
final String[] modifiers = rh.getAttribute("modifiers").split(",");
for( String modifier : modifiers ){
sfm.addModifierName(modifier);
}
sfm.setNavigatorName(rh.getAttribute("navigation"));
sfm.setTargetModifier(rh.getAttribute("target"));
break;
}
return handler;
} catch (InstantiationException ex) {
throw new InfrastructureException(ex);
} catch (IllegalAccessException ex) {
throw new InfrastructureException(ex);
}
}
}
}
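The fix in this diff scopes the tag lookups. Element.getElementsByTagName(name) searches every descendant of its receiver, so scanning commandE directly cannot tell a query transformer from a result handler that shares an XML name; note that both QueryTransformerTypes and ResultHandlerTypes define a TV constant, i.e. a <tv> tag. The corrected code therefore selects the <query-transformers> and <result-handler> wrapper elements first and searches only inside them (it also drops an unused handlers list). A minimal, self-contained DOM sketch of the difference, with a purely illustrative XML snippet:
import java.io.ByteArrayInputStream;
import javax.xml.parsers.DocumentBuilderFactory;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
public class ScopedLookupSketch {
    public static void main(String[] args) throws Exception {
        final String xml = "<command>"
                + "<query-transformers><tv/></query-transformers>"
                + "<result-handler><tv/></result-handler>"
                + "</command>";
        final Document doc = DocumentBuilderFactory.newInstance()
                .newDocumentBuilder()
                .parse(new ByteArrayInputStream(xml.getBytes("UTF-8")));
        final Element command = doc.getDocumentElement();
        // Unscoped lookup (the bug): sees both <tv/> elements.
        System.out.println(command.getElementsByTagName("tv").getLength()); // prints 2
        // Scoped lookup (the fix): restricted to the wrapper's subtree.
        final Element qtRoot =
                (Element) command.getElementsByTagName("query-transformers").item(0);
        System.out.println(qtRoot.getElementsByTagName("tv").getLength());  // prints 1
    }
}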
| false | true | public SearchConfiguration parseSearchConfiguration(
final Element commandE,
final SearchMode mode){
final SearchConfiguration inherit = findParent(commandE.getAttribute("inherit"), mode);
final String id = commandE.getAttribute("id");
LOG.info(INFO_PARSING_CONFIGURATION + commandE.getLocalName() + " " + id);
try {
final Constructor<? extends SearchConfiguration> con;
con = clazz.getConstructor(SearchConfiguration.class);
final SearchConfiguration sc;
sc = con.newInstance(inherit);
sc.setResultsToReturn(parseInt(commandE.getAttribute("results-to-return"),
inherit != null ? inherit.getResultsToReturn() : -1));
if( sc instanceof AbstractSearchConfiguration ){
// everything extends AbstractSearchConfiguration
final AbstractSearchConfiguration asc = (AbstractSearchConfiguration) sc;
final AbstractSearchConfiguration ascInherit = inherit instanceof AbstractSearchConfiguration
? (AbstractSearchConfiguration)inherit
: null;
asc.setName(id);
asc.setAlwaysRunEnabled( parseBoolean(commandE.getAttribute("always-run"),
ascInherit != null ? ascInherit.isAlwaysRunEnabled() : false) );
asc.setPagingEnabled( parseBoolean(commandE.getAttribute("paging"),
ascInherit != null ? ascInherit.isPagingEnabled() : false) );
asc.setUseParameterAsQuery( commandE.getAttribute("query-parameter") );
if( commandE.getAttribute("result-fields").length() >0 ){
final String[] resultFields = commandE.getAttribute("result-fields").split(",");
for( String resultField : resultFields ){
asc.addResultField(resultField);
}
}
asc.setStatisticsName(parseString(commandE.getAttribute("statistical-name"),
ascInherit != null ? ascInherit.getStatisticsName() : ""));
}
if( sc instanceof FastConfiguration ){
final FastConfiguration fsc = (FastConfiguration) sc;
final FastConfiguration fscInherit = inherit instanceof FastConfiguration
? (FastConfiguration)inherit
: null;
fsc.setClusteringEnabled(parseBoolean(commandE.getAttribute("clustering"),
fscInherit != null ? fscInherit.isClusteringEnabled() : false));
fsc.setCollapsingEnabled(parseBoolean(commandE.getAttribute("collapsing"),
fscInherit != null ? fscInherit.isCollapsingEnabled() : false));
//fsc.setCollectionFilterString(commandE.getAttribute("collection-filter-string")); // FIXME !!
if( commandE.getAttribute("collections").length() >0 ){
final String[] collections = commandE.getAttribute("collections").split(",");
for(String collection : collections){
fsc.addCollection(collection);
}
}
fsc.setFilter(parseString(commandE.getAttribute("filter"),
fscInherit != null ? fscInherit.getFilter() : ""));
fsc.setIgnoreNavigationEnabled(parseBoolean(commandE.getAttribute("ignore-navigation"),
fscInherit != null ? fscInherit.isIgnoreNavigationEnabled() : false));
fsc.setOffensiveScoreLimit(parseInt(commandE.getAttribute("offensive-score-limit"),
fscInherit != null ? fscInherit.getOffensiveScoreLimit() : -1));
fsc.setQtPipeline(parseString(commandE.getAttribute("qt-pipeline"),
fscInherit != null ? fscInherit.getQtPipeline() : ""));
fsc.setQueryServerURL(parseString(commandE.getAttribute("query-server-url"),
fscInherit != null ? fscInherit.getQueryServerURL() : null));
fsc.setRelevantQueriesEnabled(parseBoolean(commandE.getAttribute("relevant-queries"),
fscInherit != null ? fscInherit.isRelevantQueriesEnabled() : false));
fsc.setSortBy(parseString(commandE.getAttribute("sort-by"),
fscInherit != null ? fscInherit.getSortBy() : ""));
fsc.setSpamScoreLimit( parseInt(commandE.getAttribute("spam-score-limit"),
fscInherit != null ? fscInherit.getSpamScoreLimit() : -1));
fsc.setSpellcheckEnabled( parseBoolean(commandE.getAttribute("spellcheck"),
fscInherit != null ? fscInherit.isSpellcheckEnabled() : false));
//fsc.setSynonymEnabled(Boolean.parseBoolean(commandE.getAttribute("synonyms"))); // FIXME !!
// navigators
final NodeList nList = commandE.getElementsByTagName("navigators");
for( int i = 0; i < nList.getLength(); ++i){
final Collection<FastNavigator> navigators = parseNavigators( (Element)nList.item(i));
for(FastNavigator navigator : navigators ){
fsc.addNavigator(navigator, navigator.getId());
}
}
}
if( sc instanceof MathExpressionConfiguration ){
final MathExpressionConfiguration msc = (MathExpressionConfiguration) sc;
}
if( sc instanceof NewsSearchConfiguration ){
final NewsSearchConfiguration nsc = (NewsSearchConfiguration) sc;
}
if( sc instanceof OverturePPCConfiguration ){
final OverturePPCConfiguration osc = (OverturePPCConfiguration) sc;
final OverturePPCConfiguration oscInherit = inherit instanceof OverturePPCConfiguration
? (OverturePPCConfiguration)inherit
: null;
osc.setPartnerId(parseString(commandE.getAttribute("partner-id"),
oscInherit != null ? oscInherit.getPartnerId() : ""));
}
if( sc instanceof PicSearchConfiguration ){
final PicSearchConfiguration psc = (PicSearchConfiguration) sc;
}
if( sc instanceof SensisSearchConfiguration ){
final SensisSearchConfiguration ssc = (SensisSearchConfiguration) sc;
}
if( sc instanceof StockSearchConfiguration ){
final StockSearchConfiguration ssc = (StockSearchConfiguration) sc;
}
if( sc instanceof WebSearchConfiguration ){
final WebSearchConfiguration wsc = (WebSearchConfiguration) sc;
}
if( sc instanceof WhiteSearchConfiguration ){
final WhiteSearchConfiguration wsc = (WhiteSearchConfiguration) sc;
}
if( sc instanceof YellowSearchConfiguration ){
final YellowSearchConfiguration ysc = (YellowSearchConfiguration) sc;
}
// query transformers
final NodeList qtNodeList = commandE.getElementsByTagName("query-transformers");
for( QueryTransformerTypes qtType : QueryTransformerTypes.values()){
final NodeList qtList = commandE.getElementsByTagName(qtType.getXmlName());
for( int i = 0 ; i < qtList.getLength(); ++i ){
final Element qt = (Element) qtList.item(i);
sc.addQueryTransformer(qtType.parseQueryTransformer(qt));
}
}
// result handlers
final NodeList rhNodeList = commandE.getElementsByTagName("result-handler");
final List<ResultHandler> handlers = new ArrayList<ResultHandler>();
for( ResultHandlerTypes rhType : ResultHandlerTypes.values()){
final NodeList rhList = commandE.getElementsByTagName(rhType.getXmlName());
for( int i = 0 ; i < rhList.getLength(); ++i ){
final Element rh = (Element) rhList.item(i);
sc.addResultHandler(rhType.parseResultHandler(rh));
}
}
return sc;
} catch (InstantiationException ex) {
throw new InfrastructureException(ex);
} catch (IllegalAccessException ex) {
throw new InfrastructureException(ex);
} catch (SecurityException ex) {
throw new InfrastructureException(ex);
} catch (NoSuchMethodException ex) {
throw new InfrastructureException(ex);
} catch (IllegalArgumentException ex) {
throw new InfrastructureException(ex);
} catch (InvocationTargetException ex) {
throw new InfrastructureException(ex);
}
}
| public SearchConfiguration parseSearchConfiguration(
final Element commandE,
final SearchMode mode){
final SearchConfiguration inherit = findParent(commandE.getAttribute("inherit"), mode);
final String id = commandE.getAttribute("id");
LOG.info(INFO_PARSING_CONFIGURATION + commandE.getLocalName() + " " + id);
try {
final Constructor<? extends SearchConfiguration> con;
con = clazz.getConstructor(SearchConfiguration.class);
final SearchConfiguration sc;
sc = con.newInstance(inherit);
sc.setResultsToReturn(parseInt(commandE.getAttribute("results-to-return"),
inherit != null ? inherit.getResultsToReturn() : -1));
if( sc instanceof AbstractSearchConfiguration ){
// everything extends AbstractSearchConfiguration
final AbstractSearchConfiguration asc = (AbstractSearchConfiguration) sc;
final AbstractSearchConfiguration ascInherit = inherit instanceof AbstractSearchConfiguration
? (AbstractSearchConfiguration)inherit
: null;
asc.setName(id);
asc.setAlwaysRunEnabled( parseBoolean(commandE.getAttribute("always-run"),
ascInherit != null ? ascInherit.isAlwaysRunEnabled() : false) );
asc.setPagingEnabled( parseBoolean(commandE.getAttribute("paging"),
ascInherit != null ? ascInherit.isPagingEnabled() : false) );
asc.setUseParameterAsQuery( commandE.getAttribute("query-parameter") );
if( commandE.getAttribute("result-fields").length() >0 ){
final String[] resultFields = commandE.getAttribute("result-fields").split(",");
for( String resultField : resultFields ){
asc.addResultField(resultField);
}
}
asc.setStatisticsName(parseString(commandE.getAttribute("statistical-name"),
ascInherit != null ? ascInherit.getStatisticsName() : ""));
}
if( sc instanceof FastConfiguration ){
final FastConfiguration fsc = (FastConfiguration) sc;
final FastConfiguration fscInherit = inherit instanceof FastConfiguration
? (FastConfiguration)inherit
: null;
fsc.setClusteringEnabled(parseBoolean(commandE.getAttribute("clustering"),
fscInherit != null ? fscInherit.isClusteringEnabled() : false));
fsc.setCollapsingEnabled(parseBoolean(commandE.getAttribute("collapsing"),
fscInherit != null ? fscInherit.isCollapsingEnabled() : false));
//fsc.setCollectionFilterString(commandE.getAttribute("collection-filter-string")); // FIXME !!
if( commandE.getAttribute("collections").length() >0 ){
final String[] collections = commandE.getAttribute("collections").split(",");
for(String collection : collections){
fsc.addCollection(collection);
}
}
fsc.setFilter(parseString(commandE.getAttribute("filter"),
fscInherit != null ? fscInherit.getFilter() : ""));
fsc.setIgnoreNavigationEnabled(parseBoolean(commandE.getAttribute("ignore-navigation"),
fscInherit != null ? fscInherit.isIgnoreNavigationEnabled() : false));
fsc.setOffensiveScoreLimit(parseInt(commandE.getAttribute("offensive-score-limit"),
fscInherit != null ? fscInherit.getOffensiveScoreLimit() : -1));
fsc.setQtPipeline(parseString(commandE.getAttribute("qt-pipeline"),
fscInherit != null ? fscInherit.getQtPipeline() : ""));
fsc.setQueryServerURL(parseString(commandE.getAttribute("query-server-url"),
fscInherit != null ? fscInherit.getQueryServerURL() : null));
fsc.setRelevantQueriesEnabled(parseBoolean(commandE.getAttribute("relevant-queries"),
fscInherit != null ? fscInherit.isRelevantQueriesEnabled() : false));
fsc.setSortBy(parseString(commandE.getAttribute("sort-by"),
fscInherit != null ? fscInherit.getSortBy() : ""));
fsc.setSpamScoreLimit( parseInt(commandE.getAttribute("spam-score-limit"),
fscInherit != null ? fscInherit.getSpamScoreLimit() : -1));
fsc.setSpellcheckEnabled( parseBoolean(commandE.getAttribute("spellcheck"),
fscInherit != null ? fscInherit.isSpellcheckEnabled() : false));
//fsc.setSynonymEnabled(Boolean.parseBoolean(commandE.getAttribute("synonyms"))); // FIXME !!
// navigators
final NodeList nList = commandE.getElementsByTagName("navigators");
for( int i = 0; i < nList.getLength(); ++i){
final Collection<FastNavigator> navigators = parseNavigators( (Element)nList.item(i));
for(FastNavigator navigator : navigators ){
fsc.addNavigator(navigator, navigator.getId());
}
}
}
if( sc instanceof MathExpressionConfiguration ){
final MathExpressionConfiguration msc = (MathExpressionConfiguration) sc;
}
if( sc instanceof NewsSearchConfiguration ){
final NewsSearchConfiguration nsc = (NewsSearchConfiguration) sc;
}
if( sc instanceof OverturePPCConfiguration ){
final OverturePPCConfiguration osc = (OverturePPCConfiguration) sc;
final OverturePPCConfiguration oscInherit = inherit instanceof OverturePPCConfiguration
? (OverturePPCConfiguration)inherit
: null;
osc.setPartnerId(parseString(commandE.getAttribute("partner-id"),
oscInherit != null ? oscInherit.getPartnerId() : ""));
}
if( sc instanceof PicSearchConfiguration ){
final PicSearchConfiguration psc = (PicSearchConfiguration) sc;
}
if( sc instanceof SensisSearchConfiguration ){
final SensisSearchConfiguration ssc = (SensisSearchConfiguration) sc;
}
if( sc instanceof StockSearchConfiguration ){
final StockSearchConfiguration ssc = (StockSearchConfiguration) sc;
}
if( sc instanceof WebSearchConfiguration ){
final WebSearchConfiguration wsc = (WebSearchConfiguration) sc;
}
if( sc instanceof WhiteSearchConfiguration ){
final WhiteSearchConfiguration wsc = (WhiteSearchConfiguration) sc;
}
if( sc instanceof YellowSearchConfiguration ){
final YellowSearchConfiguration ysc = (YellowSearchConfiguration) sc;
}
// query transformers
final NodeList qtNodeList = commandE.getElementsByTagName("query-transformers");
final Element qtRootElement = (Element) qtNodeList.item(0);
for( QueryTransformerTypes qtType : QueryTransformerTypes.values()){
final NodeList qtList = qtRootElement.getElementsByTagName(qtType.getXmlName());
for( int i = 0 ; i < qtList.getLength(); ++i ){
final Element qt = (Element) qtList.item(i);
sc.addQueryTransformer(qtType.parseQueryTransformer(qt));
}
}
// result handlers
final NodeList rhNodeList = commandE.getElementsByTagName("result-handler");
final Element rhRootElement = (Element) rhNodeList.item(0);
for( ResultHandlerTypes rhType : ResultHandlerTypes.values()){
final NodeList rhList = rhRootElement.getElementsByTagName(rhType.getXmlName());
for( int i = 0 ; i < rhList.getLength(); ++i ){
final Element rh = (Element) rhList.item(i);
sc.addResultHandler(rhType.parseResultHandler(rh));
}
}
return sc;
} catch (InstantiationException ex) {
throw new InfrastructureException(ex);
} catch (IllegalAccessException ex) {
throw new InfrastructureException(ex);
} catch (SecurityException ex) {
throw new InfrastructureException(ex);
} catch (NoSuchMethodException ex) {
throw new InfrastructureException(ex);
} catch (IllegalArgumentException ex) {
throw new InfrastructureException(ex);
} catch (InvocationTargetException ex) {
throw new InfrastructureException(ex);
}
}
|
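The fix above re-roots both getElementsByTagName() searches at the <query-transformers> and <result-handler> wrapper elements instead of searching the whole command element. A minimal sketch of the difference, using the standard org.w3c.dom API; the XML document and the "prefix" tag name here are made up for illustration:

import java.io.StringReader;
import javax.xml.parsers.DocumentBuilderFactory;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.xml.sax.InputSource;

public class ScopedSearchDemo {
    public static void main(String[] args) throws Exception {
        // A command element with a "prefix" element both outside and
        // inside the <query-transformers> wrapper (hypothetical markup).
        String xml = "<command>"
                + "<prefix/>"
                + "<query-transformers><prefix/></query-transformers>"
                + "</command>";
        Document doc = DocumentBuilderFactory.newInstance()
                .newDocumentBuilder()
                .parse(new InputSource(new StringReader(xml)));
        Element command = doc.getDocumentElement();
        // Searching from the command element matches every descendant: 2 hits.
        System.out.println(command.getElementsByTagName("prefix").getLength());
        // Searching from the wrapper matches only nested elements: 1 hit.
        Element wrapper = (Element) command.getElementsByTagName("query-transformers").item(0);
        System.out.println(wrapper.getElementsByTagName("prefix").getLength());
    }
}

Because getElementsByTagName() matches all descendants in document order, searching from the command element would also pick up same-named elements belonging to sibling sections of the configuration.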
diff --git a/HandsOnCentralOhio/src/com/example/handsoncentralohio/LoginActivity.java b/HandsOnCentralOhio/src/com/example/handsoncentralohio/LoginActivity.java
index f2dba54..17fea71 100644
--- a/HandsOnCentralOhio/src/com/example/handsoncentralohio/LoginActivity.java
+++ b/HandsOnCentralOhio/src/com/example/handsoncentralohio/LoginActivity.java
@@ -1,22 +1,22 @@
package com.example.handsoncentralohio;
import android.os.Bundle;
import android.app.Activity;
import android.view.Menu;
public class LoginActivity extends Activity {
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_login);
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
// Inflate the menu; this adds items to the action bar if it is present.
- getMenuInflater().inflate(R.menu.login, menu);
+ //getMenuInflater().inflate(R.menu.login, menu);
return true;
}
}
| true | true | public boolean onCreateOptionsMenu(Menu menu) {
// Inflate the menu; this adds items to the action bar if it is present.
getMenuInflater().inflate(R.menu.login, menu);
return true;
}
| public boolean onCreateOptionsMenu(Menu menu) {
// Inflate the menu; this adds items to the action bar if it is present.
//getMenuInflater().inflate(R.menu.login, menu);
return true;
}
|
diff --git a/src/main/java/org/sikuli/slides/parsing/SlideParser.java b/src/main/java/org/sikuli/slides/parsing/SlideParser.java
index 84fa31b..113c00e 100644
--- a/src/main/java/org/sikuli/slides/parsing/SlideParser.java
+++ b/src/main/java/org/sikuli/slides/parsing/SlideParser.java
@@ -1,314 +1,314 @@
/**
Khalid
*/
package org.sikuli.slides.parsing;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import javax.xml.parsers.ParserConfigurationException;
import javax.xml.parsers.SAXParser;
import javax.xml.parsers.SAXParserFactory;
import org.xml.sax.Attributes;
import org.xml.sax.SAXException;
import org.xml.sax.helpers.DefaultHandler;
import org.sikuli.slides.media.Sound;
import org.sikuli.slides.screenshots.Screenshot;
import org.sikuli.slides.shapes.SlideShape;
public class SlideParser extends DefaultHandler {
private Screenshot originalScreenshot;
private Sound mSound;
private String xmlFile;
private boolean inScreenshot=false;
private boolean inPictureElement=false;
private boolean inSound=false;
private boolean inShapeProperties=false;
private boolean inShapeBackgroundColor=false;
private boolean inShape=false;
private boolean inArrowShape=false;
private SlideShape slideShape;
private boolean isSortedTargets=false;
private boolean inTextBody=false;
private String textBody="";
private String arrowHeadId="";
private String arrowEndId="";
private List<SlideShape> shapesList;
private List<SlideShape> labelsList;
private int order;
private String _shapeName, _shapeId;
private int _offx, _offy, _cx, _cy;
public SlideParser(String xmlFile){
this.xmlFile=xmlFile;
shapesList=new ArrayList<SlideShape>();
labelsList=new ArrayList<SlideShape>();
}
public void parseDocument(){
// reset variables
textBody="";
arrowHeadId="";
arrowEndId="";
order=-1;
SAXParserFactory factory = SAXParserFactory.newInstance();
try{
SAXParser parser = factory.newSAXParser();
parser.parse(xmlFile, this);
}
catch (ParserConfigurationException e) {
e.printStackTrace();
}
catch (SAXException e) {
e.printStackTrace();
}
catch (IOException e) {
e.printStackTrace();
}
catch (Exception e) {
e.printStackTrace();
}
}
@Override
public void startElement(String uri, String localName, String qName, Attributes attributes)
throws SAXException{
// Part 1: Parsing the original screenshot info
// if current element is picture element
if (qName.equalsIgnoreCase("p:pic")) {
inPictureElement=true;
}
/*
* if the current child element is the "a:blip", get the qualified name or the relationship id.
* This must be done because the slide.xml file doesn't include the image file name in the /media directory.
*/
else if(inScreenshot && qName.equalsIgnoreCase("a:blip")){
// get the relationship id
originalScreenshot.setRelationshipID(attributes.getValue("r:embed"));
}
/*
* if the current element is the audio file, get the relationship id number.
*/
else if(inSound && qName.equalsIgnoreCase("a:audioFile")){
mSound.setRelationshipId(attributes.getValue("r:link"));
}
/* if the current child element is the non-visual properties of the shape (p:cNvPr),
then get the screenshot name and filename
*/
else if(inPictureElement && qName.equalsIgnoreCase("p:cNvPr")){
String name=attributes.getValue("name");
if(name.contains("Picture")){
originalScreenshot=new Screenshot();
inScreenshot=true;
originalScreenshot.setName(name);
}
- else if(name.contains("Sound")){
+ else if(name.contains("Sound") || name.contains(".wav")){
mSound=new Sound();
inSound=true;
mSound.setName(name);
}
}
// if the current child element is the shape properties (p:spPr), then get the screenshot dimensions
else if(inScreenshot && qName.equals("p:spPr")){
inShapeProperties=true;
}
// if the current child element is bounding box, get the offset in x and y
else if(inScreenshot && inShapeProperties && qName.equalsIgnoreCase("a:off")){
//TODO: not sure if we will need this. See: http://openxmldeveloper.org/discussions/formats/f/13/p/867/2206.aspx
originalScreenshot.setOffX(Integer.parseInt(attributes.getValue("x")));
originalScreenshot.setOffY(Integer.parseInt(attributes.getValue("y")));
}
// if the current child element is the extents in x and y, get the values
else if(inScreenshot && inShapeProperties && qName.equalsIgnoreCase("a:ext")){
// Bug#39: check if the cx and cy attributes exist in case of copying and pasting the slide
String cx_val=attributes.getValue("cx");
String cy_val=attributes.getValue("cy");
if(cx_val!=null&&cy_val!=null){
originalScreenshot.setCx(Integer.parseInt(attributes.getValue("cx")));
originalScreenshot.setCy(Integer.parseInt(attributes.getValue("cy")));
}
}
// Part 2: Parsing the shape information.
// if the current element is a shape
else if(qName.equalsIgnoreCase("p:sp")){
inShape=true;
order++;
// shape info variables
_shapeName="";
_shapeId="";
_offx=0; _offy=0; _cx=0; _cy=0;
}
// if the current element is the shape type, create the corresponding shape object
//TODO check if a:prstGeom is more accurate than p:cNvPr
else if(inShape&&qName.equalsIgnoreCase("p:cNvPr")){
// get the shape name
_shapeName=attributes.getValue("name");
// get the shape id
_shapeId=attributes.getValue("id");
}
// if the current child element is bounding box, get the offset in x and y
else if(inShape && qName.equalsIgnoreCase("a:off")){
_offx=Integer.parseInt(attributes.getValue("x"));
_offy=Integer.parseInt(attributes.getValue("y"));
}
// if the current child element is the extents in x and y, get the values
else if(inShape && qName.equalsIgnoreCase("a:ext")){
_cx=Integer.parseInt(attributes.getValue("cx"));
_cy=Integer.parseInt(attributes.getValue("cy"));
}
// if the current child element is the shape persistent geometry, create the shape based on its type
else if(inShape && qName.equalsIgnoreCase("a:prstGeom")){
String shapeType=attributes.getValue("prst");
slideShape=new SlideShape(_shapeId,_shapeName,order,shapeType,_offx,_offy,_cx,_cy,"");
}
// if the current element is the solid background color
else if(inShape && qName.equalsIgnoreCase("a:solidFill")){
inShapeBackgroundColor=true;
}
else if(inShape && inShapeBackgroundColor && qName.equalsIgnoreCase("a:srgbClr")){
if(slideShape!=null){
slideShape.setBackgroundColor(attributes.getValue("val"));
}
}
// if the current element is the shape text body
else if(inShape && qName.equalsIgnoreCase("p:txBody")){
inTextBody=true;
}
// get font size
else if(inTextBody && qName.equals("a:rPr")){
String size= attributes.getValue("sz");
if(size!=null&&slideShape!=null){
slideShape.setTextSize(Integer.parseInt(size));
}
}
// Parsing connected shapes like arrows
else if(qName.equalsIgnoreCase("p:cxnSp")){
inArrowShape=true;
}
// get the start connected shape id
else if(inArrowShape&&qName.equalsIgnoreCase("a:stCxn")){
arrowHeadId=attributes.getValue("id");
}
// get the end connected shape id
else if(inArrowShape&&qName.equalsIgnoreCase("a:endCxn")){
arrowEndId=attributes.getValue("id");
}
}
@Override
public void endElement(String uri, String localName, String qName) throws SAXException {
if(inScreenshot && qName.equalsIgnoreCase("p:pic")){
if(inScreenshot){
inScreenshot=false;
}
else if(inSound){
inSound=false;
}
inPictureElement=false;
}
else if(inScreenshot && inShapeProperties && qName.equalsIgnoreCase("p:spPr")){
inShapeProperties=false;
}
else if(inShape && qName.equalsIgnoreCase("p:sp")){
inShape=false;
// if the shape is a label, add it to the label list
if(slideShape!=null&&slideShape.getBackgroundColor().equals("FFFF00")){
labelsList.add(slideShape);
}
else{
addShapeToList();
}
}
else if(inArrowShape && qName.equalsIgnoreCase("p:cxnSp")){
inArrowShape=false;
setRoundedRectangleDragAndDropOrder();
}
else if(inShape && qName.equalsIgnoreCase("a:solidFill")){
inShapeBackgroundColor=false;
}
else if(inTextBody && qName.equalsIgnoreCase("p:txBody")){
inTextBody=false;
if(slideShape!=null){
// check text value to sort the targets
checkNumeric(textBody);
slideShape.setText(textBody);
}
textBody="";
}
}
private void checkNumeric(String text) {
try{
int target_order=Integer.parseInt(text.trim());
isSortedTargets=true;
slideShape.setTargetOrder(target_order);
}
catch(NumberFormatException e){
return;
}
}
private void setRoundedRectangleDragAndDropOrder() {
if(shapesList!=null){
for(SlideShape mShape:shapesList){
if(mShape.getId().equals(arrowHeadId)){
mShape.setOrder(0);
}
else if(mShape.getId().equals(arrowEndId)){
mShape.setOrder(1);
}
}
}
}
@Override
public void characters(char[] ch, int start, int length){
if(inTextBody){
textBody+=new String(ch, start, length);
}
}
// return the original screenshot
public Screenshot getScreenshot(){
return originalScreenshot;
}
// return the sound
public Sound getSound(){
return mSound;
}
// add the shape to the list
private void addShapeToList(){
if(slideShape!=null)
shapesList.add(slideShape);
}
// return list of shapes
public List<SlideShape> getShapes(){
// check if sorting shapes is required
if(isSortedTargets){
sortShapes();
}
return shapesList;
}
private void sortShapes() {
Collections.sort(shapesList);
}
// return a list of labels
public List<SlideShape> getLabels(){
return labelsList;
}
}
| true | true | public void startElement(String uri, String localName, String qName, Attributes attributes)
throws SAXException{
// Part 1: Parsing the original screenshot info
// if current element is picture element
if (qName.equalsIgnoreCase("p:pic")) {
inPictureElement=true;
}
/*
* if the current child element is the "a:blip", get the qualified name or the relationship id.
* This must be done because the slide.xml file doesn't include the image file name in the /media directory.
*/
else if(inScreenshot && qName.equalsIgnoreCase("a:blip")){
// get the relationship id
originalScreenshot.setRelationshipID(attributes.getValue("r:embed"));
}
/*
* if the current element is the audio file, get the relationship id number.
*/
else if(inSound && qName.equalsIgnoreCase("a:audioFile")){
mSound.setRelationshipId(attributes.getValue("r:link"));
}
/* if the current child element is the non-visual properties of the shape (p:cNvPr),
then get the screenshot name and filename
*/
else if(inPictureElement && qName.equalsIgnoreCase("p:cNvPr")){
String name=attributes.getValue("name");
if(name.contains("Picture")){
originalScreenshot=new Screenshot();
inScreenshot=true;
originalScreenshot.setName(name);
}
else if(name.contains("Sound")){
mSound=new Sound();
inSound=true;
mSound.setName(name);
}
}
// if the current child element is the shape properties (p:spPr), then get the screenshot dimensions
else if(inScreenshot && qName.equals("p:spPr")){
inShapeProperties=true;
}
// if the current child element is bounding box, get the offset in x and y
else if(inScreenshot && inShapeProperties && qName.equalsIgnoreCase("a:off")){
//TODO: not sure if we will need this. See: http://openxmldeveloper.org/discussions/formats/f/13/p/867/2206.aspx
originalScreenshot.setOffX(Integer.parseInt(attributes.getValue("x")));
originalScreenshot.setOffY(Integer.parseInt(attributes.getValue("y")));
}
// if the current child element is the extents in x and y, get the values
else if(inScreenshot && inShapeProperties && qName.equalsIgnoreCase("a:ext")){
// Bug#39: check if the cx and cy attributes exist in case of copying and pasting the slide
String cx_val=attributes.getValue("cx");
String cy_val=attributes.getValue("cy");
if(cx_val!=null&&cy_val!=null){
originalScreenshot.setCx(Integer.parseInt(attributes.getValue("cx")));
originalScreenshot.setCy(Integer.parseInt(attributes.getValue("cy")));
}
}
// Part 2: Parsing the shape information.
// if the current element is a shape
else if(qName.equalsIgnoreCase("p:sp")){
inShape=true;
order++;
// shape info variables
_shapeName="";
_shapeId="";
_offx=0; _offy=0; _cx=0; _cy=0;
}
// if the current element is the shape type, create the corresponding shape object
//TODO check if a:prstGeom is more accurate than p:cNvPr
else if(inShape&&qName.equalsIgnoreCase("p:cNvPr")){
// get the shape name
_shapeName=attributes.getValue("name");
// get the shape id
_shapeId=attributes.getValue("id");
}
// if the current child element is bounding box, get the offset in x and y
else if(inShape && qName.equalsIgnoreCase("a:off")){
_offx=Integer.parseInt(attributes.getValue("x"));
_offy=Integer.parseInt(attributes.getValue("y"));
}
// if the current child element is the extents in x and y, get the values
else if(inShape && qName.equalsIgnoreCase("a:ext")){
_cx=Integer.parseInt(attributes.getValue("cx"));
_cy=Integer.parseInt(attributes.getValue("cy"));
}
// if the current child element is the shape persistent geometry, create the shape based on its type
else if(inShape && qName.equalsIgnoreCase("a:prstGeom")){
String shapeType=attributes.getValue("prst");
slideShape=new SlideShape(_shapeId,_shapeName,order,shapeType,_offx,_offy,_cx,_cy,"");
}
// if the current element is the solid background color
else if(inShape && qName.equalsIgnoreCase("a:solidFill")){
inShapeBackgroundColor=true;
}
else if(inShape && inShapeBackgroundColor && qName.equalsIgnoreCase("a:srgbClr")){
if(slideShape!=null){
slideShape.setBackgroundColor(attributes.getValue("val"));
}
}
// if the current element is the shape text body
else if(inShape && qName.equalsIgnoreCase("p:txBody")){
inTextBody=true;
}
// get font size
else if(inTextBody && qName.equals("a:rPr")){
String size= attributes.getValue("sz");
if(size!=null&&slideShape!=null){
slideShape.setTextSize(Integer.parseInt(size));
}
}
// Parsing connected shapes like arrows
else if(qName.equalsIgnoreCase("p:cxnSp")){
inArrowShape=true;
}
// get the start connected shape id
else if(inArrowShape&&qName.equalsIgnoreCase("a:stCxn")){
arrowHeadId=attributes.getValue("id");
}
// get the end connected shape id
else if(inArrowShape&&qName.equalsIgnoreCase("a:endCxn")){
arrowEndId=attributes.getValue("id");
}
}
| public void startElement(String uri, String localName, String qName, Attributes attributes)
throws SAXException{
// Part 1: Parsing the original screenshot info
// if current element is picture element
if (qName.equalsIgnoreCase("p:pic")) {
inPictureElement=true;
}
/*
* if the current child element is the "a:blip", get the qualified name or the relationship id.
* This must be done because the slide.xml file doesn't include the image file name in the /media directory.
*/
else if(inScreenshot && qName.equalsIgnoreCase("a:blip")){
// get the relationship id
originalScreenshot.setRelationshipID(attributes.getValue("r:embed"));
}
/*
* if the current element is the audio file, get the relationship id number.
*/
else if(inSound && qName.equalsIgnoreCase("a:audioFile")){
mSound.setRelationshipId(attributes.getValue("r:link"));
}
/* if the current child element is the non-visual properties of the shape (p:cNvPr),
then get the screenshot name and filename
*/
else if(inPictureElement && qName.equalsIgnoreCase("p:cNvPr")){
String name=attributes.getValue("name");
if(name.contains("Picture")){
originalScreenshot=new Screenshot();
inScreenshot=true;
originalScreenshot.setName(name);
}
else if(name.contains("Sound") || name.contains(".wav")){
mSound=new Sound();
inSound=true;
mSound.setName(name);
}
}
// if the current child element is the shape properties (p:spPr), then get the screenshot dimensions
else if(inScreenshot && qName.equals("p:spPr")){
inShapeProperties=true;
}
// if the current child element is bounding box, get the offset in x and y
else if(inScreenshot && inShapeProperties && qName.equalsIgnoreCase("a:off")){
//TODO: not sure if we will need this. See: http://openxmldeveloper.org/discussions/formats/f/13/p/867/2206.aspx
originalScreenshot.setOffX(Integer.parseInt(attributes.getValue("x")));
originalScreenshot.setOffY(Integer.parseInt(attributes.getValue("y")));
}
// if the current child element is the extents in x and y, get the values
else if(inScreenshot && inShapeProperties && qName.equalsIgnoreCase("a:ext")){
// Bug#39: check if the cx and cy attributes exist in case of copying and pasting the slide
String cx_val=attributes.getValue("cx");
String cy_val=attributes.getValue("cy");
if(cx_val!=null&&cy_val!=null){
originalScreenshot.setCx(Integer.parseInt(attributes.getValue("cx")));
originalScreenshot.setCy(Integer.parseInt(attributes.getValue("cy")));
}
}
// Part 2: Parsing the shape information.
// if the current element is a shape
else if(qName.equalsIgnoreCase("p:sp")){
inShape=true;
order++;
// shape info variables
_shapeName="";
_shapeId="";
_offx=0; _offy=0; _cx=0; _cy=0;
}
// if the current element is the shape type, create the corresponding shape object
//TODO check if a:prstGeom is more accurate than p:cNvPr
else if(inShape&&qName.equalsIgnoreCase("p:cNvPr")){
// get the shape name
_shapeName=attributes.getValue("name");
// get the shape id
_shapeId=attributes.getValue("id");
}
// if the current child element is bounding box, get the offset in x and y
else if(inShape && qName.equalsIgnoreCase("a:off")){
_offx=Integer.parseInt(attributes.getValue("x"));
_offy=Integer.parseInt(attributes.getValue("y"));
}
// if the current child element is the extents in x and y, get the values
else if(inShape && qName.equalsIgnoreCase("a:ext")){
_cx=Integer.parseInt(attributes.getValue("cx"));
_cy=Integer.parseInt(attributes.getValue("cy"));
}
// if the current child element is the shape persistent geometry, create the shape based on its type
else if(inShape && qName.equalsIgnoreCase("a:prstGeom")){
String shapeType=attributes.getValue("prst");
slideShape=new SlideShape(_shapeId,_shapeName,order,shapeType,_offx,_offy,_cx,_cy,"");
}
// if the current element is the solid background color
else if(inShape && qName.equalsIgnoreCase("a:solidFill")){
inShapeBackgroundColor=true;
}
else if(inShape && inShapeBackgroundColor && qName.equalsIgnoreCase("a:srgbClr")){
if(slideShape!=null){
slideShape.setBackgroundColor(attributes.getValue("val"));
}
}
// if the current element is the shape text body
else if(inShape && qName.equalsIgnoreCase("p:txBody")){
inTextBody=true;
}
// get font size
else if(inTextBody && qName.equals("a:rPr")){
String size= attributes.getValue("sz");
if(size!=null&&slideShape!=null){
slideShape.setTextSize(Integer.parseInt(size));
}
}
// Parsing connected shapes like arrows
else if(qName.equalsIgnoreCase("p:cxnSp")){
inArrowShape=true;
}
// get the start connected shape id
else if(inArrowShape&&qName.equalsIgnoreCase("a:stCxn")){
arrowHeadId=attributes.getValue("id");
}
// get the end connected shape id
else if(inArrowShape&&qName.equalsIgnoreCase("a:endCxn")){
arrowEndId=attributes.getValue("id");
}
}
|
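The changed condition above lets a shape whose p:cNvPr name carries the audio file name, rather than PowerPoint's default "Sound N" label, still be recognised as a sound. A small sketch of the broadened predicate; the sample names are hypothetical:

public class SoundNameCheck {
    // Mirrors the fixed condition: match PowerPoint's default "Sound N"
    // label, or a name that kept the original .wav file name.
    static boolean isSoundName(String name) {
        return name.contains("Sound") || name.contains(".wav");
    }

    public static void main(String[] args) {
        System.out.println(isSoundName("Sound 3"));   // true
        System.out.println(isSoundName("click.wav")); // true  (hypothetical file name)
        System.out.println(isSoundName("Picture 2")); // false
    }
}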
diff --git a/dsd-maven-plugin/src/main/java/org/melati/poem/prepro/ReferenceFieldDef.java b/dsd-maven-plugin/src/main/java/org/melati/poem/prepro/ReferenceFieldDef.java
index f7dc29535..a1bb9a44a 100644
--- a/dsd-maven-plugin/src/main/java/org/melati/poem/prepro/ReferenceFieldDef.java
+++ b/dsd-maven-plugin/src/main/java/org/melati/poem/prepro/ReferenceFieldDef.java
@@ -1,135 +1,135 @@
/*
* $Source$
* $Revision$
*
* Part of Melati (http://melati.org), a framework for the rapid
* development of clean, maintainable web applications.
*
* -------------------------------------
* Copyright (C) 2000 William Chesters
* -------------------------------------
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*
* A copy of the GPL should be in the file org/melati/COPYING in this tree.
* Or see http://melati.org/License.html.
*
* Contact details for copyright holder:
*
* William Chesters <[email protected]>
* http://paneris.org/~williamc
* Obrechtstraat 114, 2517VX Den Haag, The Netherlands
*
*
* ------
* Note
* ------
*
* I will assign copyright to PanEris (http://paneris.org) as soon as
* we have sorted out what sort of legal existence we need to have for
* that to make sense. When WebMacro's "Simple Public License" is
* finalised, we'll offer it as an alternative license for Melati.
* In the meantime, if you want to use Melati on non-GPL terms,
* contact me!
*/
package org.melati.poem.prepro;
import java.util.*;
import java.io.*;
public class ReferenceFieldDef extends FieldDef {
public ReferenceFieldDef(TableDef table, String name, int displayOrder,
String type, Vector qualifiers)
throws IllegalityException {
super(table, name, type, "Integer", displayOrder, qualifiers);
}
protected void generateColRawAccessors(Writer w) throws IOException {
super.generateColRawAccessors(w);
w.write(
"\n" +
" public Object getRaw(Persistent g)\n" +
" throws AccessPoemException {\n" +
" return ((" + mainClass + ")g).get" + suffix + "Troid();\n" +
" }\n" +
"\n" +
" public void setRaw(Persistent g, Object raw)\n" +
" throws AccessPoemException {\n" +
" ((" + mainClass + ")g).set" + suffix + "Troid((" +
rawType + ")raw);\n" +
" }\n");
}
private String targetCast() {
TableDef targetTable = (TableDef)table.dsd.tableOfClass.get(type);
return targetTable == null || targetTable.superclass == null ?
"" : "(" + type + ")";
}
public void generateBaseMethods(Writer w) throws IOException {
super.generateBaseMethods(w);
// FIXME the definition of these is duplicated from TableDef
String targetTableAccessorMethod = "get" + type + "Table";
String targetSuffix = type;
w.write("\n" +
" public Integer get" + suffix + "Troid()\n" +
" throws AccessPoemException {\n" +
" readLock();\n" +
" return get" + suffix + "_unsafe();\n" +
" }\n" +
"\n" +
" public void set" + suffix + "Troid(Integer raw)\n" +
" throws AccessPoemException {\n" +
- " " + tableAccessorMethod + "().get" + suffix + "Column()." +
+ " _" + tableAccessorMethod + "().get" + suffix + "Column()." +
"getType().assertValidRaw(raw);\n" +
" writeLock();\n" +
" set" + suffix + "_unsafe(raw);\n" +
" }\n" +
"\n" +
" public " + type + " get" + suffix + "()\n" +
" throws AccessPoemException, NoSuchRowPoemException {\n" +
" Integer troid = get" + suffix + "Troid();\n" +
" return troid == null ? null :\n" +
// This cast is necessary when the target table is
// an "extends"
" " + targetCast() +
"get" + table.dsd.databaseClass + "()." +
targetTableAccessorMethod + "()." +
"get" + targetSuffix + "Object(troid);\n" +
" }\n" +
"\n" +
" public void set" + suffix + "(" + type + " cooked)\n" +
" throws AccessPoemException {\n" +
" set" + suffix + "Troid(cooked == null ? null : cooked.troid());\n" +
" }\n");
}
public void generateJavaDeclaration(Writer w) throws IOException {
w.write("Integer " + name);
}
public String poemTypeJava() {
// FIXME the definition of these is duplicated from TableDef
String targetTableAccessorMethod = "get" + type + "Table";
return
"new ReferencePoemType(((" + table.dsd.databaseClass + ")getDatabase())." +
targetTableAccessorMethod + "(), " + isNullable + ")";
}
}
| true | true | public void generateBaseMethods(Writer w) throws IOException {
super.generateBaseMethods(w);
// FIXME the definition of these is duplicated from TableDef
String targetTableAccessorMethod = "get" + type + "Table";
String targetSuffix = type;
w.write("\n" +
" public Integer get" + suffix + "Troid()\n" +
" throws AccessPoemException {\n" +
" readLock();\n" +
" return get" + suffix + "_unsafe();\n" +
" }\n" +
"\n" +
" public void set" + suffix + "Troid(Integer raw)\n" +
" throws AccessPoemException {\n" +
" " + tableAccessorMethod + "().get" + suffix + "Column()." +
"getType().assertValidRaw(raw);\n" +
" writeLock();\n" +
" set" + suffix + "_unsafe(raw);\n" +
" }\n" +
"\n" +
" public " + type + " get" + suffix + "()\n" +
" throws AccessPoemException, NoSuchRowPoemException {\n" +
" Integer troid = get" + suffix + "Troid();\n" +
" return troid == null ? null :\n" +
// This cast is necessary when the target table is
// an "extends"
" " + targetCast() +
"get" + table.dsd.databaseClass + "()." +
targetTableAccessorMethod + "()." +
"get" + targetSuffix + "Object(troid);\n" +
" }\n" +
"\n" +
" public void set" + suffix + "(" + type + " cooked)\n" +
" throws AccessPoemException {\n" +
" set" + suffix + "Troid(cooked == null ? null : cooked.troid());\n" +
" }\n");
}
| public void generateBaseMethods(Writer w) throws IOException {
super.generateBaseMethods(w);
// FIXME the definition of these is duplicated from TableDef
String targetTableAccessorMethod = "get" + type + "Table";
String targetSuffix = type;
w.write("\n" +
" public Integer get" + suffix + "Troid()\n" +
" throws AccessPoemException {\n" +
" readLock();\n" +
" return get" + suffix + "_unsafe();\n" +
" }\n" +
"\n" +
" public void set" + suffix + "Troid(Integer raw)\n" +
" throws AccessPoemException {\n" +
" _" + tableAccessorMethod + "().get" + suffix + "Column()." +
"getType().assertValidRaw(raw);\n" +
" writeLock();\n" +
" set" + suffix + "_unsafe(raw);\n" +
" }\n" +
"\n" +
" public " + type + " get" + suffix + "()\n" +
" throws AccessPoemException, NoSuchRowPoemException {\n" +
" Integer troid = get" + suffix + "Troid();\n" +
" return troid == null ? null :\n" +
// This cast is necessary when the target table is
// an "extends"
" " + targetCast() +
"get" + table.dsd.databaseClass + "()." +
targetTableAccessorMethod + "()." +
"get" + targetSuffix + "Object(troid);\n" +
" }\n" +
"\n" +
" public void set" + suffix + "(" + type + " cooked)\n" +
" throws AccessPoemException {\n" +
" set" + suffix + "Troid(cooked == null ? null : cooked.troid());\n" +
" }\n");
}
|
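Instantiating the template above shows what the one-character change does to the generated source. For a hypothetical field "owner" on a table whose accessor is "getProjectTable" (both names invented for illustration), the emitted setter now reads:

// Hypothetical generated output; names are illustrative only.
public void setOwnerTroid(Integer raw)
    throws AccessPoemException {
  _getProjectTable().getOwnerColumn().getType().assertValidRaw(raw);
  writeLock();
  setOwner_unsafe(raw);
}

That is, the generated code now calls the underscore-prefixed table accessor; presumably that variant bypasses access checks, though its exact contract lives outside this diff.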
diff --git a/cspi-schema/src/main/java/org/collectionspace/chain/csp/schema/EmailData.java b/cspi-schema/src/main/java/org/collectionspace/chain/csp/schema/EmailData.java
index 29ea6ddb..9160ef6a 100644
--- a/cspi-schema/src/main/java/org/collectionspace/chain/csp/schema/EmailData.java
+++ b/cspi-schema/src/main/java/org/collectionspace/chain/csp/schema/EmailData.java
@@ -1,94 +1,94 @@
/* Copyright 2010 University of Cambridge
* Licensed under the Educational Community License (ECL), Version 2.0. You may not use this file except in
* compliance with this License.
*
* You may obtain a copy of the ECL 2.0 License at https://source.collectionspace.org/collection-space/LICENSE.txt
*/
package org.collectionspace.chain.csp.schema;
import org.collectionspace.chain.csp.config.ReadOnlySection;
import org.json.JSONException;
import org.json.JSONObject;
/**
*
* @author caret
* All email-specific data from the cspace-config.xml file, parsed when the server starts up.
* Holds static data, e.g. the content of emails and from-addresses.
*
*/
public class EmailData {
String baseurl,fromaddress,toaddress,loginurl ;
String smtphost,smtpport,smtppass,smtpuser;
Boolean smtpdebug,smtpauth;
String pswdmsg, pswdsubj, tokenvalid;
/* <email>
<baseurl>hendecasyllabic.local:8180</baseurl>
<from>[email protected]</from>
<to></to><!-- if specified then all emails will send to this address - used for debugging -->
<smtp>
<host>localhost</host>
<port>25</port>
<debug>false</debug>
<auth enabled="false"> <!-- set to true if wish to use auth -->
<username></username>
<password></password>
</auth>
</smtp>
<passwordreset>
<subject>CollectionSpace Password reset request</subject>
<message>A password reset has been requested from this email. If you wish to reset your password please click on this link {{link}}.</message>
</passwordreset>
</email>
*/
public EmailData(Spec spec, ReadOnlySection section) {
baseurl=(String)section.getValue("/baseurl");
fromaddress=(String)section.getValue("/from");
toaddress=(String)section.getValue("/to");
smtphost = (String)section.getValue("/smtp/host");
smtpport = (String)section.getValue("/smtp/port");
smtpdebug = Util.getBooleanOrDefault(section,"/smtp/debug",false);
smtpauth = Util.getBooleanOrDefault(section,"/smtp/auth/@enabled",false);
smtppass = (String)section.getValue("/smtp/auth/password");
smtpuser = (String)section.getValue("/smtp/auth/username");
pswdmsg = (String)section.getValue("/passwordreset/message");
pswdsubj = (String)section.getValue("/passwordreset/subject");
- loginurl = (String)section.getValue("/loginpage");
+ loginurl = (String)section.getValue("/passwordreset/loginpage");
tokenvalid = Util.getStringOrDefault(section, "/passwordreset/token/daysvalid", "7");
}
public String getLoginUrl() {return loginurl; }
public String getBaseURL() { return baseurl; }
public String getFromAddress() { return fromaddress; }
public String getToAddress() { return toaddress; }
public String getSMTPPort() { return smtpport; }
public String getSMTPHost() { return smtphost; }
public Boolean doSMTPDebug() { return smtpdebug; }
public String getPasswordResetMessage() { return pswdmsg; }
public String getPasswordResetSubject() { return pswdsubj; }
public Integer getTokenValidForLength() { return Integer.parseInt(tokenvalid); }
public Boolean doSMTPAuth() { return smtpauth; }
public String getSMTPAuthPassword() { if(smtpauth){ return smtppass;} else {return null;} }
public String getSMTPAuthUsername() { if(smtpauth){ return smtpuser;} else {return null;} }
public EmailData getEmailData() { return this; }
void dumpJson(JSONObject out) throws JSONException {
JSONObject record = new JSONObject();
record.put("baseurl", baseurl);
record.put("getFromAddress", fromaddress);
record.put("getToAddress", toaddress);
record.put("getPasswordResetMessage", pswdmsg);
record.put("getPasswordResetSubject", pswdsubj);
record.put("getTokenValidForLength", tokenvalid);
out.put("EmailData", record);
}
}
| true | true | public EmailData(Spec spec, ReadOnlySection section) {
baseurl=(String)section.getValue("/baseurl");
fromaddress=(String)section.getValue("/from");
toaddress=(String)section.getValue("/to");
smtphost = (String)section.getValue("/smtp/host");
smtpport = (String)section.getValue("/smtp/port");
smtpdebug = Util.getBooleanOrDefault(section,"/smtp/debug",false);
smtpauth = Util.getBooleanOrDefault(section,"/smtp/auth/@enabled",false);
smtppass = (String)section.getValue("/smtp/auth/password");
smtpuser = (String)section.getValue("/smtp/auth/username");
pswdmsg = (String)section.getValue("/passwordreset/message");
pswdsubj = (String)section.getValue("/passwordreset/subject");
loginurl = (String)section.getValue("/loginpage");
tokenvalid = Util.getStringOrDefault(section, "/passwordreset/token/daysvalid", "7");
}
| public EmailData(Spec spec, ReadOnlySection section) {
baseurl=(String)section.getValue("/baseurl");
fromaddress=(String)section.getValue("/from");
toaddress=(String)section.getValue("/to");
smtphost = (String)section.getValue("/smtp/host");
smtpport = (String)section.getValue("/smtp/port");
smtpdebug = Util.getBooleanOrDefault(section,"/smtp/debug",false);
smtpauth = Util.getBooleanOrDefault(section,"/smtp/auth/@enabled",false);
smtppass = (String)section.getValue("/smtp/auth/password");
smtpuser = (String)section.getValue("/smtp/auth/username");
pswdmsg = (String)section.getValue("/passwordreset/message");
pswdsubj = (String)section.getValue("/passwordreset/subject");
loginurl = (String)section.getValue("/passwordreset/loginpage");
tokenvalid = Util.getStringOrDefault(section, "/passwordreset/token/daysvalid", "7");
}
|
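The corrected path reads <loginpage> from under <passwordreset>, consistent with the /passwordreset/message and /passwordreset/subject lookups beside it. A toy sketch of why the short path returned nothing, modelling the section as a flat path-to-value map; ReadOnlySection's real resolution rules may differ, and the data here is assumed:

import java.util.HashMap;
import java.util.Map;

public class SectionLookupSketch {
    public static void main(String[] args) {
        // Values as they might be registered for the <email> section,
        // keyed by their path from the section root (hypothetical data).
        Map<String, String> section = new HashMap<>();
        section.put("/passwordreset/subject", "CollectionSpace Password reset request");
        section.put("/passwordreset/loginpage", "login.html");

        // The old lookup misses the nested element and yields null...
        System.out.println(section.get("/loginpage"));               // null
        // ...while the full path finds it.
        System.out.println(section.get("/passwordreset/loginpage")); // login.html
    }
}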
diff --git a/core/src/com/google/zxing/common/GridSampler.java b/core/src/com/google/zxing/common/GridSampler.java
index 93a39005..b847a31e 100644
--- a/core/src/com/google/zxing/common/GridSampler.java
+++ b/core/src/com/google/zxing/common/GridSampler.java
@@ -1,169 +1,169 @@
/*
* Copyright 2007 ZXing authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.zxing.common;
import com.google.zxing.MonochromeBitmapSource;
import com.google.zxing.ReaderException;
/**
* Implementations of this class can, given locations of finder patterns for a QR code in an
* image, sample the right points in the image to reconstruct the QR code, accounting for
* perspective distortion. It is abstracted since it is relatively expensive and should be allowed
* to take advantage of platform-specific optimized implementations, like Sun's Java Advanced
* Imaging library, but which may not be available in other environments such as J2ME, and vice
* versa.
*
* The implementation used can be controlled by calling {@link #setGridSampler(GridSampler)}
* with an instance of a class which implements this interface.
*
* @author [email protected] (Sean Owen)
*/
public abstract class GridSampler {
private static GridSampler gridSampler = null;
/**
* Sets the implementation of {@link GridSampler} used by the library. One global
* instance is stored, which may sound problematic. But, the implementation provided
* ought to be appropriate for the entire platform, and all uses of this library
* in the whole lifetime of the JVM. For instance, an Android activity can swap in
* an implementation that takes advantage of native platform libraries.
*
* @param newGridSampler the {@link GridSampler} implementation to use from now on
*/
public static void setGridSampler(GridSampler newGridSampler) {
if (newGridSampler == null) {
throw new IllegalArgumentException();
}
gridSampler = newGridSampler;
}
/**
* @return the current implementation of {@link GridSampler}
*/
public static GridSampler getInstance() {
// No real point in trying to make this thread-safe;
// doesn't matter if a second instance is created
if (gridSampler == null) {
gridSampler = new DefaultGridSampler();
}
return gridSampler;
}
/**
* <p>Samples an image for a square matrix of bits of the given dimension. This is used to extract the
* black/white modules of a 2D barcode like a QR Code found in an image. Because this barcode may be
* rotated or perspective-distorted, the caller supplies four points in the source image that define
* known points in the barcode, so that the image may be sampled appropriately.</p>
*
* <p>The last eight "from" parameters are four X/Y coordinate pairs of locations of points in
* the image that define some significant points in the image to be sampled. For example,
* these may be the locations of finder patterns in a QR Code.</p>
*
* <p>The first eight "to" parameters are four X/Y coordinate pairs measured in the destination
* {@link BitMatrix}, from the top left, where the known points in the image given by the "from" parameters
* map to.</p>
*
* <p>These 16 parameters define the transformation needed to sample the image.</p>
*
* @param image image to sample
* @param dimension width/height of {@link BitMatrix} to sample from image
* @return {@link BitMatrix} representing a grid of points sampled from the image within a region
* defined by the "from" parameters
* @throws ReaderException if image can't be sampled, for example, if the transformation defined by
* the given points is invalid or results in sampling outside the image boundaries
*/
public abstract BitMatrix sampleGrid(MonochromeBitmapSource image,
int dimension,
float p1ToX, float p1ToY,
float p2ToX, float p2ToY,
float p3ToX, float p3ToY,
float p4ToX, float p4ToY,
float p1FromX, float p1FromY,
float p2FromX, float p2FromY,
float p3FromX, float p3FromY,
float p4FromX, float p4FromY) throws ReaderException;
/**
* <p>Checks a set of points that have been transformed to sample points on an image against
* the image's dimensions to see if the points are even within the image.</p>
*
* <p>This method will actually "nudge" the endpoints back onto the image if they are found to be barely
* (less than 1 pixel) off the image. This accounts for imperfect detection of finder patterns in an image
* where the QR Code runs all the way to the image border.</p>
*
* <p>For efficiency, the method will check points from either end of the line until one is found
* to be within the image. Because the set of points are assumed to be linear, this is valid.</p>
*
* @param image image into which the points should map
* @param points actual points in x1,y1,...,xn,yn form
* @throws ReaderException if an endpoint lies outside the image boundaries
*/
- static void checkAndNudgePoints(MonochromeBitmapSource image, float[] points) throws ReaderException {
+ protected static void checkAndNudgePoints(MonochromeBitmapSource image, float[] points) throws ReaderException {
int width = image.getWidth();
int height = image.getHeight();
// Check and nudge points from start until we see some that are OK:
boolean nudged = true;
for (int offset = 0; offset < points.length && nudged; offset += 2) {
int x = (int) points[offset];
int y = (int) points[offset + 1];
if (x < -1 || x > width || y < -1 || y > height) {
throw new ReaderException("Transformed point out of bounds at " + x + ',' + y);
}
nudged = false;
if (x == -1) {
points[offset] = 0.0f;
nudged = true;
} else if (x == width) {
points[offset] = width - 1;
nudged = true;
}
if (y == -1) {
points[offset + 1] = 0.0f;
nudged = true;
} else if (y == height) {
points[offset + 1] = height - 1;
nudged = true;
}
}
// Check and nudge points from end:
nudged = true;
for (int offset = points.length - 2; offset >= 0 && nudged; offset -= 2) {
int x = (int) points[offset];
int y = (int) points[offset + 1];
if (x < -1 || x > width || y < -1 || y > height) {
throw new ReaderException("Transformed point out of bounds at " + x + ',' + y);
}
nudged = false;
if (x == -1) {
points[offset] = 0.0f;
nudged = true;
} else if (x == width) {
points[offset] = width - 1;
nudged = true;
}
if (y == -1) {
points[offset + 1] = 0.0f;
nudged = true;
} else if (y == height) {
points[offset + 1] = height - 1;
nudged = true;
}
}
}
}
| true | true | static void checkAndNudgePoints(MonochromeBitmapSource image, float[] points) throws ReaderException {
int width = image.getWidth();
int height = image.getHeight();
// Check and nudge points from start until we see some that are OK:
boolean nudged = true;
for (int offset = 0; offset < points.length && nudged; offset += 2) {
int x = (int) points[offset];
int y = (int) points[offset + 1];
if (x < -1 || x > width || y < -1 || y > height) {
throw new ReaderException("Transformed point out of bounds at " + x + ',' + y);
}
nudged = false;
if (x == -1) {
points[offset] = 0.0f;
nudged = true;
} else if (x == width) {
points[offset] = width - 1;
nudged = true;
}
if (y == -1) {
points[offset + 1] = 0.0f;
nudged = true;
} else if (y == height) {
points[offset + 1] = height - 1;
nudged = true;
}
}
// Check and nudge points from end:
nudged = true;
for (int offset = points.length - 2; offset >= 0 && nudged; offset -= 2) {
int x = (int) points[offset];
int y = (int) points[offset + 1];
if (x < -1 || x > width || y < -1 || y > height) {
throw new ReaderException("Transformed point out of bounds at " + x + ',' + y);
}
nudged = false;
if (x == -1) {
points[offset] = 0.0f;
nudged = true;
} else if (x == width) {
points[offset] = width - 1;
nudged = true;
}
if (y == -1) {
points[offset + 1] = 0.0f;
nudged = true;
} else if (y == height) {
points[offset + 1] = height - 1;
nudged = true;
}
}
}
| protected static void checkAndNudgePoints(MonochromeBitmapSource image, float[] points) throws ReaderException {
int width = image.getWidth();
int height = image.getHeight();
// Check and nudge points from start until we see some that are OK:
boolean nudged = true;
for (int offset = 0; offset < points.length && nudged; offset += 2) {
int x = (int) points[offset];
int y = (int) points[offset + 1];
if (x < -1 || x > width || y < -1 || y > height) {
throw new ReaderException("Transformed point out of bounds at " + x + ',' + y);
}
nudged = false;
if (x == -1) {
points[offset] = 0.0f;
nudged = true;
} else if (x == width) {
points[offset] = width - 1;
nudged = true;
}
if (y == -1) {
points[offset + 1] = 0.0f;
nudged = true;
} else if (y == height) {
points[offset + 1] = height - 1;
nudged = true;
}
}
// Check and nudge points from end:
nudged = true;
for (int offset = points.length - 2; offset >= 0 && nudged; offset -= 2) {
int x = (int) points[offset];
int y = (int) points[offset + 1];
if (x < -1 || x > width || y < -1 || y > height) {
throw new ReaderException("Transformed point out of bounds at " + x + ',' + y);
}
nudged = false;
if (x == -1) {
points[offset] = 0.0f;
nudged = true;
} else if (x == width) {
points[offset] = width - 1;
nudged = true;
}
if (y == -1) {
points[offset + 1] = 0.0f;
nudged = true;
} else if (y == height) {
points[offset + 1] = height - 1;
nudged = true;
}
}
}
|
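The nudge logic in checkAndNudgePoints clamps coordinates that land at most one pixel outside the image back onto its border and rejects anything further out. A standalone sketch of the per-coordinate rule; the image size and sample values are made up, and a plain IllegalArgumentException stands in for ReaderException:

public class NudgeSketch {
    // -1 snaps to 0, an exact value of size snaps to size - 1,
    // anything beyond that range is an error (as in checkAndNudgePoints).
    static float nudge(float coord, int size) {
        int v = (int) coord;
        if (v < -1 || v > size) {
            throw new IllegalArgumentException("Transformed point out of bounds at " + v);
        }
        if (v == -1) {
            return 0.0f;
        }
        if (v == size) {
            return size - 1;
        }
        return coord;
    }

    public static void main(String[] args) {
        System.out.println(nudge(-1.3f, 100)); // 0.0  (barely off the left edge, nudged back)
        System.out.println(nudge(100f, 100));  // 99.0 (barely off the right edge)
        System.out.println(nudge(42.7f, 100)); // 42.7 (inside, unchanged)
    }
}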
diff --git a/Essentials/src/com/earth2me/essentials/EssentialsEcoBlockListener.java b/Essentials/src/com/earth2me/essentials/EssentialsEcoBlockListener.java
index aac9feff..39552925 100644
--- a/Essentials/src/com/earth2me/essentials/EssentialsEcoBlockListener.java
+++ b/Essentials/src/com/earth2me/essentials/EssentialsEcoBlockListener.java
@@ -1,250 +1,250 @@
package com.earth2me.essentials;
import java.util.Map;
import java.util.logging.Level;
import java.util.logging.Logger;
import org.bukkit.Material;
import org.bukkit.block.Sign;
import org.bukkit.craftbukkit.block.CraftSign;
import org.bukkit.event.block.BlockBreakEvent;
import org.bukkit.event.block.BlockListener;
import org.bukkit.event.block.SignChangeEvent;
import org.bukkit.inventory.ItemStack;
public class EssentialsEcoBlockListener extends BlockListener
{
Essentials ess;
private static final Logger logger = Logger.getLogger("Minecraft");
public EssentialsEcoBlockListener(Essentials ess)
{
this.ess = ess;
}
@Override
public void onBlockBreak(BlockBreakEvent event)
{
if (event.isCancelled())
{
return;
}
if (ess.getSettings().areSignsDisabled())
{
return;
}
User user = ess.getUser(event.getPlayer());
String username = user.getName().substring(0, user.getName().length() > 13 ? 13 : user.getName().length());
if (event.getBlock().getType() != Material.WALL_SIGN && event.getBlock().getType() != Material.SIGN_POST)
{
return;
}
Sign sign = new CraftSign(event.getBlock());
if (sign.getLine(0).equals("§1[Trade]"))
{
if (!sign.getLine(3).substring(2).equals(username))
{
if (!user.isOp())
{
event.setCancelled(true);
}
return;
}
try
{
String[] l1 = sign.getLines()[1].split("[ :-]+");
String[] l2 = sign.getLines()[2].split("[ :-]+");
boolean m1 = l1[0].matches("[^0-9][0-9]+(\\.[0-9]+)?");
boolean m2 = l2[0].matches("[^0-9][0-9]+(\\.[0-9]+)?");
double q1 = Double.parseDouble(m1 ? l1[0].substring(1) : l1[0]);
double q2 = Double.parseDouble(m2 ? l2[0].substring(1) : l2[0]);
double r1 = Double.parseDouble(l1[m1 ? 1 : 2]);
double r2 = Double.parseDouble(l2[m2 ? 1 : 2]);
if ((!m1 & q1 < 1) || (!m2 & q2 < 1))
{
throw new Exception(Util.i18n("moreThanZero"));
}
ItemStack i1 = m1 || r1 <= 0 ? null : ItemDb.get(l1[1], (int)r1);
ItemStack i2 = m2 || r2 <= 0 ? null : ItemDb.get(l2[1], (int)r2);
if (m1)
{
user.giveMoney(r1);
}
else if (i1 != null)
{
Map<Integer, ItemStack> leftOver = user.getInventory().addItem(i1);
for (ItemStack itemStack : leftOver.values())
{
InventoryWorkaround.dropItem(user.getLocation(), itemStack);
}
}
if (m2)
{
user.giveMoney(r2);
}
else if (i2 != null)
{
Map<Integer, ItemStack> leftOver = user.getInventory().addItem(i2);
for (ItemStack itemStack : leftOver.values())
{
InventoryWorkaround.dropItem(user.getLocation(), itemStack);
}
}
user.updateInventory();
sign.setType(Material.AIR);
}
catch (Throwable ex)
{
user.sendMessage(Util.format("errorWithMessage", ex.getMessage()));
if (ess.getSettings().isDebug())
{
logger.log(Level.WARNING, ex.getMessage(), ex);
}
}
return;
}
}
@Override
public void onSignChange(SignChangeEvent event)
{
if (ess.getSettings().areSignsDisabled())
{
return;
}
User user = ess.getUser(event.getPlayer());
String username = user.getName().substring(0, user.getName().length() > 13 ? 13 : user.getName().length());
if ((event.getLine(0).equalsIgnoreCase("[Buy]") || event.getLine(0).equalsIgnoreCase("#1[Buy]")) && user.isAuthorized("essentials.signs.buy.create"))
{
try
{
event.setLine(0, "§1[Buy]");
event.setLine(1, "" + Math.abs(Integer.parseInt(event.getLine(1))));
ItemStack is = ItemDb.get(event.getLine(2));
if (is.getTypeId() == 0 || Math.abs(Integer.parseInt(event.getLine(1))) == 0)
{
throw new Exception("Don't sell air.");
}
double price = Double.parseDouble(event.getLine(3).replaceAll("[^0-9\\.]", ""));
event.setLine(3, Util.formatCurrency(price));
}
catch (Throwable ex)
{
user.sendMessage(Util.format("errorWithMessage", ex.getMessage()));
if (ess.getSettings().isDebug())
{
logger.log(Level.WARNING, ex.getMessage(), ex);
}
event.setLine(0, "§4[Buy]");
event.setLine(1, "#");
event.setLine(2, "Item");
event.setLine(3, "$Price");
}
return;
}
if ((event.getLine(0).equalsIgnoreCase("[Sell]") || event.getLine(0).equalsIgnoreCase("#1[Sell]")) && user.isAuthorized("essentials.signs.sell.create"))
{
try
{
event.setLine(0, "§1[Sell]");
event.setLine(1, "" + Math.abs(Integer.parseInt(event.getLine(1))));
ItemStack is = ItemDb.get(event.getLine(2));
if (is.getTypeId() == 0 || Math.abs(Integer.parseInt(event.getLine(1))) == 0)
{
throw new Exception("Can't buy air.");
}
double price = Double.parseDouble(event.getLine(3).replaceAll("[^0-9\\.]", ""));
event.setLine(3, Util.formatCurrency(price));
}
catch (Throwable ex)
{
user.sendMessage(Util.format("errorWithMessage", ex.getMessage()));
if (ess.getSettings().isDebug())
{
logger.log(Level.WARNING, ex.getMessage(), ex);
}
event.setLine(0, "§4[Sell]");
event.setLine(1, "#");
event.setLine(2, "Item");
event.setLine(3, "$Price");
}
return;
}
if ((event.getLine(0).equalsIgnoreCase("[Trade]") || event.getLine(0).equalsIgnoreCase("#1[Trade]")) && user.isAuthorized("essentials.signs.trade.create"))
{
try
{
String[] l1 = event.getLine(1).split("[ :-]+");
String[] l2 = event.getLine(2).split("[ :-]+");
boolean m1 = l1[0].matches("[^0-9][0-9]+(\\.[0-9]+)?");
boolean m2 = l2[0].matches("[^0-9][0-9]+(\\.[0-9]+)?");
double q1 = Double.parseDouble(m1 ? l1[0].substring(1) : l1[0]);
double q2 = Double.parseDouble(m2 ? l2[0].substring(1) : l2[0]);
- if (m1 ? l2.length != 1 : l2.length != 2)
+ if (m1 ? l1.length != 1 : l1.length != 2)
{
throw new Exception(Util.format("invalidSignLine", 2));
}
if (m2 ? l2.length != 2 : l2.length != 3)
{
throw new Exception(Util.format("invalidSignLine", 3));
}
double r2 = Double.parseDouble(l2[m2 ? 1 : 2]);
r2 = m2 ? r2 : r2 - r2 % q2;
if ((!m1 & q1 < 1) || (!m2 & q2 < 1) || r2 < 1)
{
throw new Exception(Util.i18n("moreThanZero"));
}
if (!m1)
{
ItemDb.get(l1[1]);
}
if (m2)
{
if (user.getMoney() < r2)
{
throw new Exception(Util.i18n("notEnoughMoney"));
}
user.takeMoney(r2);
//user.sendMessage("r2: " + r2 + " q2: " + q2);
}
else
{
ItemStack i2 = ItemDb.get(l2[1], (int)r2);
if (!InventoryWorkaround.containsItem(user.getInventory(), true, i2))
{
throw new Exception(Util.format("missingItems", (int)r2, l2[1]));
}
InventoryWorkaround.removeItem(user.getInventory(), true, i2);
user.updateInventory();
}
event.setLine(0, "§1[Trade]");
event.setLine(1, (m1 ? Util.formatCurrency(q1) : (int)q1 + " " + l1[1]) + ":0");
event.setLine(2, (m2 ? Util.formatCurrency(q2) : (int)q2 + " " + l2[1]) + ":" + (m2 ? Util.roundDouble(r2) : "" + (int)r2));
event.setLine(3, "§8" + username);
}
catch (Throwable ex)
{
user.sendMessage(Util.format("errorWithMessage", ex.getMessage()));
if (ess.getSettings().isDebug())
{
logger.log(Level.WARNING, ex.getMessage(), ex);
}
event.setLine(0, "§4[Trade]");
event.setLine(1, "# ItemOr" + ess.getSettings().getCurrencySymbol());
event.setLine(2, "# ItemOr" + ess.getSettings().getCurrencySymbol() + ":#");
event.setLine(3, "§8" + username);
}
return;
}
}
}
| true | true | public void onSignChange(SignChangeEvent event)
{
if (ess.getSettings().areSignsDisabled())
{
return;
}
User user = ess.getUser(event.getPlayer());
String username = user.getName().substring(0, user.getName().length() > 13 ? 13 : user.getName().length());
if ((event.getLine(0).equalsIgnoreCase("[Buy]") || event.getLine(0).equalsIgnoreCase("#1[Buy]")) && user.isAuthorized("essentials.signs.buy.create"))
{
try
{
event.setLine(0, "§1[Buy]");
event.setLine(1, "" + Math.abs(Integer.parseInt(event.getLine(1))));
ItemStack is = ItemDb.get(event.getLine(2));
if (is.getTypeId() == 0 || Math.abs(Integer.parseInt(event.getLine(1))) == 0)
{
throw new Exception("Don't sell air.");
}
double price = Double.parseDouble(event.getLine(3).replaceAll("[^0-9\\.]", ""));
event.setLine(3, Util.formatCurrency(price));
}
catch (Throwable ex)
{
user.sendMessage(Util.format("errorWithMessage", ex.getMessage()));
if (ess.getSettings().isDebug())
{
logger.log(Level.WARNING, ex.getMessage(), ex);
}
event.setLine(0, "§4[Buy]");
event.setLine(1, "#");
event.setLine(2, "Item");
event.setLine(3, "$Price");
}
return;
}
if ((event.getLine(0).equalsIgnoreCase("[Sell]") || event.getLine(0).equalsIgnoreCase("#1[Sell]")) && user.isAuthorized("essentials.signs.sell.create"))
{
try
{
event.setLine(0, "§1[Sell]");
event.setLine(1, "" + Math.abs(Integer.parseInt(event.getLine(1))));
ItemStack is = ItemDb.get(event.getLine(2));
if (is.getTypeId() == 0 || Math.abs(Integer.parseInt(event.getLine(1))) == 0)
{
throw new Exception("Can't buy air.");
}
double price = Double.parseDouble(event.getLine(3).replaceAll("[^0-9\\.]", ""));
event.setLine(3, Util.formatCurrency(price));
}
catch (Throwable ex)
{
user.sendMessage(Util.format("errorWithMessage", ex.getMessage()));
if (ess.getSettings().isDebug())
{
logger.log(Level.WARNING, ex.getMessage(), ex);
}
event.setLine(0, "§4[Sell]");
event.setLine(1, "#");
event.setLine(2, "Item");
event.setLine(3, "$Price");
}
return;
}
if ((event.getLine(0).equalsIgnoreCase("[Trade]") || event.getLine(0).equalsIgnoreCase("#1[Trade]")) && user.isAuthorized("essentials.signs.trade.create"))
{
try
{
String[] l1 = event.getLine(1).split("[ :-]+");
String[] l2 = event.getLine(2).split("[ :-]+");
boolean m1 = l1[0].matches("[^0-9][0-9]+(\\.[0-9]+)?");
boolean m2 = l2[0].matches("[^0-9][0-9]+(\\.[0-9]+)?");
double q1 = Double.parseDouble(m1 ? l1[0].substring(1) : l1[0]);
double q2 = Double.parseDouble(m2 ? l2[0].substring(1) : l2[0]);
if (m1 ? l2.length != 1 : l2.length != 2)
{
throw new Exception(Util.format("invalidSignLine", 2));
}
if (m2 ? l2.length != 2 : l2.length != 3)
{
throw new Exception(Util.format("invalidSignLine", 3));
}
double r2 = Double.parseDouble(l2[m2 ? 1 : 2]);
r2 = m2 ? r2 : r2 - r2 % q2;
if ((!m1 & q1 < 1) || (!m2 & q2 < 1) || r2 < 1)
{
throw new Exception(Util.i18n("moreThanZero"));
}
if (!m1)
{
ItemDb.get(l1[1]);
}
if (m2)
{
if (user.getMoney() < r2)
{
throw new Exception(Util.i18n("notEnoughMoney"));
}
user.takeMoney(r2);
//user.sendMessage("r2: " + r2 + " q2: " + q2);
}
else
{
ItemStack i2 = ItemDb.get(l2[1], (int)r2);
if (!InventoryWorkaround.containsItem(user.getInventory(), true, i2))
{
throw new Exception(Util.format("missingItems", (int)r2, l2[1]));
}
InventoryWorkaround.removeItem(user.getInventory(), true, i2);
user.updateInventory();
}
event.setLine(0, "§1[Trade]");
event.setLine(1, (m1 ? Util.formatCurrency(q1) : (int)q1 + " " + l1[1]) + ":0");
event.setLine(2, (m2 ? Util.formatCurrency(q2) : (int)q2 + " " + l2[1]) + ":" + (m2 ? Util.roundDouble(r2) : "" + (int)r2));
event.setLine(3, "§8" + username);
}
catch (Throwable ex)
{
user.sendMessage(Util.format("errorWithMessage", ex.getMessage()));
if (ess.getSettings().isDebug())
{
logger.log(Level.WARNING, ex.getMessage(), ex);
}
event.setLine(0, "§4[Trade]");
event.setLine(1, "# ItemOr" + ess.getSettings().getCurrencySymbol());
event.setLine(2, "# ItemOr" + ess.getSettings().getCurrencySymbol() + ":#");
event.setLine(3, "§8" + username);
}
return;
}
}
| public void onSignChange(SignChangeEvent event)
{
if (ess.getSettings().areSignsDisabled())
{
return;
}
User user = ess.getUser(event.getPlayer());
String username = user.getName().substring(0, user.getName().length() > 13 ? 13 : user.getName().length());
if ((event.getLine(0).equalsIgnoreCase("[Buy]") || event.getLine(0).equalsIgnoreCase("#1[Buy]")) && user.isAuthorized("essentials.signs.buy.create"))
{
try
{
event.setLine(0, "§1[Buy]");
event.setLine(1, "" + Math.abs(Integer.parseInt(event.getLine(1))));
ItemStack is = ItemDb.get(event.getLine(2));
if (is.getTypeId() == 0 || Math.abs(Integer.parseInt(event.getLine(1))) == 0)
{
throw new Exception("Don't sell air.");
}
double price = Double.parseDouble(event.getLine(3).replaceAll("[^0-9\\.]", ""));
event.setLine(3, Util.formatCurrency(price));
}
catch (Throwable ex)
{
user.sendMessage(Util.format("errorWithMessage", ex.getMessage()));
if (ess.getSettings().isDebug())
{
logger.log(Level.WARNING, ex.getMessage(), ex);
}
event.setLine(0, "§4[Buy]");
event.setLine(1, "#");
event.setLine(2, "Item");
event.setLine(3, "$Price");
}
return;
}
if ((event.getLine(0).equalsIgnoreCase("[Sell]") || event.getLine(0).equalsIgnoreCase("#1[Sell]")) && user.isAuthorized("essentials.signs.sell.create"))
{
try
{
event.setLine(0, "§1[Sell]");
event.setLine(1, "" + Math.abs(Integer.parseInt(event.getLine(1))));
ItemStack is = ItemDb.get(event.getLine(2));
if (is.getTypeId() == 0 || Math.abs(Integer.parseInt(event.getLine(1))) == 0)
{
throw new Exception("Can't buy air.");
}
double price = Double.parseDouble(event.getLine(3).replaceAll("[^0-9\\.]", ""));
event.setLine(3, Util.formatCurrency(price));
}
catch (Throwable ex)
{
user.sendMessage(Util.format("errorWithMessage", ex.getMessage()));
if (ess.getSettings().isDebug())
{
logger.log(Level.WARNING, ex.getMessage(), ex);
}
event.setLine(0, "§4[Sell]");
event.setLine(1, "#");
event.setLine(2, "Item");
event.setLine(3, "$Price");
}
return;
}
if ((event.getLine(0).equalsIgnoreCase("[Trade]") || event.getLine(0).equalsIgnoreCase("#1[Trade]")) && user.isAuthorized("essentials.signs.trade.create"))
{
try
{
String[] l1 = event.getLine(1).split("[ :-]+");
String[] l2 = event.getLine(2).split("[ :-]+");
boolean m1 = l1[0].matches("[^0-9][0-9]+(\\.[0-9]+)?");
boolean m2 = l2[0].matches("[^0-9][0-9]+(\\.[0-9]+)?");
double q1 = Double.parseDouble(m1 ? l1[0].substring(1) : l1[0]);
double q2 = Double.parseDouble(m2 ? l2[0].substring(1) : l2[0]);
if (m1 ? l1.length != 1 : l1.length != 2)
{
throw new Exception(Util.format("invalidSignLine", 2));
}
if (m2 ? l2.length != 2 : l2.length != 3)
{
throw new Exception(Util.format("invalidSignLine", 3));
}
double r2 = Double.parseDouble(l2[m2 ? 1 : 2]);
r2 = m2 ? r2 : r2 - r2 % q2;
if ((!m1 & q1 < 1) || (!m2 & q2 < 1) || r2 < 1)
{
throw new Exception(Util.i18n("moreThanZero"));
}
if (!m1)
{
ItemDb.get(l1[1]);
}
if (m2)
{
if (user.getMoney() < r2)
{
throw new Exception(Util.i18n("notEnoughMoney"));
}
user.takeMoney(r2);
//user.sendMessage("r2: " + r2 + " q2: " + q2);
}
else
{
ItemStack i2 = ItemDb.get(l2[1], (int)r2);
if (!InventoryWorkaround.containsItem(user.getInventory(), true, i2))
{
throw new Exception(Util.format("missingItems", (int)r2, l2[1]));
}
InventoryWorkaround.removeItem(user.getInventory(), true, i2);
user.updateInventory();
}
event.setLine(0, "§1[Trade]");
event.setLine(1, (m1 ? Util.formatCurrency(q1) : (int)q1 + " " + l1[1]) + ":0");
event.setLine(2, (m2 ? Util.formatCurrency(q2) : (int)q2 + " " + l2[1]) + ":" + (m2 ? Util.roundDouble(r2) : "" + (int)r2));
event.setLine(3, "§8" + username);
}
catch (Throwable ex)
{
user.sendMessage(Util.format("errorWithMessage", ex.getMessage()));
if (ess.getSettings().isDebug())
{
logger.log(Level.WARNING, ex.getMessage(), ex);
}
event.setLine(0, "§4[Trade]");
event.setLine(1, "# ItemOr" + ess.getSettings().getCurrencySymbol());
event.setLine(2, "# ItemOr" + ess.getSettings().getCurrencySymbol() + ":#");
event.setLine(3, "§8" + username);
}
return;
}
}
|
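A note on the Essentials patch above: it is a one-character change, but it repairs real validation logic. Line 1 of a new trade sign is tokenized into l1 and line 2 into l2; the buggy guard tests l2.length in the branch that is supposed to validate l1, so a malformed first line slips through whenever the second line happens to be well formed. Below is a minimal standalone sketch (not Essentials code; the class, method names, and sample lines are made up) that reproduces the difference:
// Sketch: a money side such as "$100" splits into one token on "[ :-]+",
// an item side such as "5 diamond" splits into two.
public class TradeSignValidationSketch
{
	static boolean validLine1Buggy(String[] l1, String[] l2, boolean m1)
	{
		return !(m1 ? l2.length != 1 : l2.length != 2); // bug: checks the wrong array
	}
	static boolean validLine1Fixed(String[] l1, boolean m1)
	{
		return !(m1 ? l1.length != 1 : l1.length != 2); // checks the line being validated
	}
	public static void main(String[] args)
	{
		String[] l1 = "$100 junk".split("[ :-]+"); // malformed money line: 2 tokens
		String[] l2 = "$50".split("[ :-]+");       // well-formed money line: 1 token
		boolean m1 = l1[0].matches("[^0-9][0-9]+(\\.[0-9]+)?");
		System.out.println(validLine1Buggy(l1, l2, m1)); // true: bad line accepted
		System.out.println(validLine1Fixed(l1, m1));     // false: bad line rejected
	}
}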
diff --git a/src/java/net/sf/jabref/oo/AlphanumericComparator.java b/src/java/net/sf/jabref/oo/AlphanumericComparator.java
index aabbca29c..93c5d69d0 100755
--- a/src/java/net/sf/jabref/oo/AlphanumericComparator.java
+++ b/src/java/net/sf/jabref/oo/AlphanumericComparator.java
@@ -1,51 +1,64 @@
/* Copyright (C) 2003-2011 JabRef contributors.
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License along
with this program; if not, write to the Free Software Foundation, Inc.,
51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
*/
package net.sf.jabref.oo;
import net.sf.jabref.BibtexEntry;
import net.sf.jabref.FieldComparator;
import java.util.Comparator;
/**
* Comparator for sorting bibliography entries.
*
* TODO: is it sufficient with a hardcoded sort algorithm for the bibliography?
*/
public class AlphanumericComparator implements Comparator<BibtexEntry> {
FieldComparator authComp = new FieldComparator("author"),
editorComp = new FieldComparator("editor"),
yearComp = new FieldComparator("year");
public AlphanumericComparator() {
}
public int compare(BibtexEntry o1, BibtexEntry o2) {
// Author as first criterion:
int comp = authComp.compare(o1, o2);
if (comp != 0)
return comp;
- // TODO: Is it a good idea to try editor if author fields are equal?
+ // Editor as second criterion:
comp = editorComp.compare(o1, o2);
if (comp != 0)
return comp;
// Year as next criterion:
- return yearComp.compare(o1, o2);
+ comp = yearComp.compare(o1, o2);
+ if (comp != 0)
+ return comp;
+ // Bibtex key as next criterion:
+ String k1 = o1.getCiteKey(), k2 = o2.getCiteKey();
+ if (k1 != null) {
+ if (k2 != null)
+ return k1.compareTo(k2);
+ else
+ return 1;
+ }
+ else if (k2 != null)
+ return -1;
+ else return 0;
}
}
| false | true | public int compare(BibtexEntry o1, BibtexEntry o2) {
// Author as first criterion:
int comp = authComp.compare(o1, o2);
if (comp != 0)
return comp;
// TODO: Is it a good idea to try editor if author fields are equal?
comp = editorComp.compare(o1, o2);
if (comp != 0)
return comp;
// Year as next criterion:
return yearComp.compare(o1, o2);
}
| public int compare(BibtexEntry o1, BibtexEntry o2) {
// Author as first criterion:
int comp = authComp.compare(o1, o2);
if (comp != 0)
return comp;
// Editor as second criterion:
comp = editorComp.compare(o1, o2);
if (comp != 0)
return comp;
// Year as next criterion:
comp = yearComp.compare(o1, o2);
if (comp != 0)
return comp;
// Bibtex key as next criterion:
String k1 = o1.getCiteKey(), k2 = o2.getCiteKey();
if (k1 != null) {
if (k2 != null)
return k1.compareTo(k2);
else
return 1;
}
else if (k2 != null)
return -1;
else return 0;
}
|
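The JabRef patch above extends a comparator chain (author, then editor, then year) with a null-safe cite-key tiebreaker, so entries identical in all three fields still get a stable order. Its hand-written null checks implement a nulls-first ordering: a missing key sorts before any present key. As an illustration only (not JabRef code), the same tiebreaker expressed with java.util.Comparator from Java 8:
import java.util.Comparator;

class CiteKeyOrderSketch {
    // Equivalent to the patch's null handling: null < non-null, null == null.
    static final Comparator<String> CITE_KEY_ORDER =
            Comparator.nullsFirst(Comparator.naturalOrder());

    public static void main(String[] args) {
        System.out.println(CITE_KEY_ORDER.compare("abc2010", null)); // positive, like "return 1"
        System.out.println(CITE_KEY_ORDER.compare(null, "abc2010")); // negative, like "return -1"
        System.out.println(CITE_KEY_ORDER.compare(null, null));      // 0
    }
}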
diff --git a/src/com/martinbrook/tesseractuhc/UhcParticipant.java b/src/com/martinbrook/tesseractuhc/UhcParticipant.java
index fbac535..0cab0de 100644
--- a/src/com/martinbrook/tesseractuhc/UhcParticipant.java
+++ b/src/com/martinbrook/tesseractuhc/UhcParticipant.java
@@ -1,185 +1,185 @@
package com.martinbrook.tesseractuhc;
import java.util.HashSet;
import org.bukkit.ChatColor;
import org.bukkit.GameMode;
import org.bukkit.Location;
import org.bukkit.Material;
import org.bukkit.entity.Player;
import org.bukkit.inventory.ItemStack;
import org.bukkit.potion.PotionEffect;
import org.bukkit.potion.PotionEffectType;
import com.martinbrook.tesseractuhc.startpoint.UhcStartPoint;
public class UhcParticipant implements PlayerTarget {
private boolean launched = false;
private UhcTeam team;
private HashSet<PlayerTarget> nearbyTargets = new HashSet<PlayerTarget>();
private UhcPlayer player;
private boolean dead = false;
private boolean miningFatigueAlerted = false;
private int miningFatigueGrace = 20;
private long lastDamageTime = 0;
private boolean warnedHardStone = false;
public UhcParticipant(UhcPlayer pl, UhcTeam team) {
this.player = pl;
this.team = team;
}
public String getName() {
return player.getName();
}
public boolean isLaunched() {
return launched;
}
public void setLaunched(boolean launched) {
this.launched = launched;
}
public UhcStartPoint getStartPoint() {
return team.getStartPoint();
}
public boolean isDead() {
return dead;
}
public void setDead(boolean dead) {
this.dead = dead;
}
public UhcTeam getTeam() {
return team;
}
public boolean isNearTo(PlayerTarget target) {
return nearbyTargets.contains(target);
}
public void setNearTo(PlayerTarget target, boolean b) {
if (b)
nearbyTargets.add(target);
else
nearbyTargets.remove(target);
}
public boolean teleport(Player p) { return this.teleport(p, "You have been teleported!"); }
public boolean teleport(Location l) { return this.teleport(l, "You have been teleported!"); }
public boolean teleport(Player p, String message) { return this.teleport(p.getLocation(), message); }
public boolean teleport(Location l, String message) { return player.teleport(l, message); }
public UhcPlayer getPlayer() { return player; }
public boolean sendToStartPoint() {
return (player.setGameMode(GameMode.ADVENTURE) && teleport(getStartPoint().getLocation()) && player.renew());
}
public boolean start() {
return (player.feed() && player.clearXP() && player.clearPotionEffects()
&& player.heal() && player.setGameMode(GameMode.SURVIVAL));
}
public boolean sendMessage(String message) { return player.sendMessage(message); }
/**
* Apply the mining fatigue game mechanic
*
* Players who mine stone below a certain depth increase their hunger
*
* @param player The player to act upon
* @param blockY The Y coordinate of the mined block
*/
public void doMiningFatigue(int blockY) {
Double exhaustion = 0.0;
if (blockY < UhcMatch.DIAMOND_LAYER) {
exhaustion = this.player.getMatch().getConfig().getMiningFatigueDiamond();
} else if (blockY < UhcMatch.GOLD_LAYER) {
exhaustion = this.player.getMatch().getConfig().getMiningFatigueGold();
}
if (exhaustion > 0) {
if (!miningFatigueAlerted) {
sendMessage(ChatColor.GOLD + "Warning: mining at this depth will soon make you very hungry!");
miningFatigueAlerted=true;
}
if (miningFatigueGrace > 0) {
if (--miningFatigueGrace == 0)
sendMessage(ChatColor.GOLD + "Warning: mining any more at this depth will make you very hungry!");
} else {
player.getPlayer().setExhaustion((float) (player.getPlayer().getExhaustion() + exhaustion));
player.getPlayer().addPotionEffect(new PotionEffect(PotionEffectType.SLOW_DIGGING, 1200, 0));
}
}
}
/**
* Apply the hard stone game mechanic
*
* Players who mine smoothstone wear out their tools more quickly than normal
*
* @param blockY The Y coordinate of the mined block
* @param tool The tool that was used to mine the block
*/
public void doHardStone(int blockY, ItemStack tool) {
// Calculate applicable durability penalty
short penalty;
if (tool.getType() == Material.GOLD_PICKAXE) {
penalty = UhcMatch.DURABILITY_PENALTY_GOLD;
} else if (tool.getType() == Material.WOOD_PICKAXE) {
penalty = UhcMatch.DURABILITY_PENALTY_WOOD;
} else if (tool.getType() == Material.STONE_PICKAXE) {
penalty = UhcMatch.DURABILITY_PENALTY_STONE;
} else if (tool.getType() == Material.IRON_PICKAXE) {
penalty = UhcMatch.DURABILITY_PENALTY_IRON;
} else if (tool.getType() == Material.DIAMOND_PICKAXE) {
penalty = UhcMatch.DURABILITY_PENALTY_DIAMOND;
} else return;
// Warn the player the first time
if (!warnedHardStone) {
player.sendMessage(ChatColor.GOLD + "Warning! Mining smoothstone will wear out your tools more quickly than in normal Minecraft.");
warnedHardStone=true;
}
// Apply durability cost
- tool.setDurability((short) (tool.getDurability() - penalty));
+ tool.setDurability((short) (tool.getDurability() + penalty));
}
/**
* Mark the player as having taken damage
*/
public void setDamageTimer() {
lastDamageTime = player.getMatch().getStartingWorld().getFullTime();
}
/**
* @return whether the player has taken damage recently
*/
public boolean isRecentlyDamaged() {
return (player.getMatch().getStartingWorld().getFullTime() - lastDamageTime < UhcMatch.PLAYER_DAMAGE_ALERT_TICKS);
}
}
| true | true | public void doHardStone(int blockY, ItemStack tool) {
// Calculate applicable durability penalty
short penalty;
if (tool.getType() == Material.GOLD_PICKAXE) {
penalty = UhcMatch.DURABILITY_PENALTY_GOLD;
} else if (tool.getType() == Material.WOOD_PICKAXE) {
penalty = UhcMatch.DURABILITY_PENALTY_WOOD;
} else if (tool.getType() == Material.STONE_PICKAXE) {
penalty = UhcMatch.DURABILITY_PENALTY_STONE;
} else if (tool.getType() == Material.IRON_PICKAXE) {
penalty = UhcMatch.DURABILITY_PENALTY_IRON;
} else if (tool.getType() == Material.DIAMOND_PICKAXE) {
penalty = UhcMatch.DURABILITY_PENALTY_DIAMOND;
} else return;
// Warn the player the first time
if (!warnedHardStone) {
player.sendMessage(ChatColor.GOLD + "Warning! Mining smoothstone will wear out your tools more quickly than in normal Minecraft.");
warnedHardStone=true;
}
// Apply durability cost
tool.setDurability((short) (tool.getDurability() - penalty));
}
| public void doHardStone(int blockY, ItemStack tool) {
// Calculate applicable durability penalty
short penalty;
if (tool.getType() == Material.GOLD_PICKAXE) {
penalty = UhcMatch.DURABILITY_PENALTY_GOLD;
} else if (tool.getType() == Material.WOOD_PICKAXE) {
penalty = UhcMatch.DURABILITY_PENALTY_WOOD;
} else if (tool.getType() == Material.STONE_PICKAXE) {
penalty = UhcMatch.DURABILITY_PENALTY_STONE;
} else if (tool.getType() == Material.IRON_PICKAXE) {
penalty = UhcMatch.DURABILITY_PENALTY_IRON;
} else if (tool.getType() == Material.DIAMOND_PICKAXE) {
penalty = UhcMatch.DURABILITY_PENALTY_DIAMOND;
} else return;
// Warn the player the first time
if (!warnedHardStone) {
player.sendMessage(ChatColor.GOLD + "Warning! Mining smoothstone will wear out your tools more quickly than in normal Minecraft.");
warnedHardStone=true;
}
// Apply durability cost
tool.setDurability((short) (tool.getDurability() + penalty));
}
|
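The TesseractUHC patch above flips a single sign, and the reason is the Bukkit durability convention: in the Bukkit API of that era, ItemStack.getDurability() returns accumulated damage (0 is pristine, and the item breaks once the value passes the material's maximum), so a wear penalty must be added. Subtracting it, as the buggy line did, made heavily used pickaxes healthier. A short sketch of the arithmetic (not TesseractUHC code; the numbers are made up):
public class DurabilityPenaltySketch {
    public static void main(String[] args) {
        short damage = 30;  // accumulated damage on the tool so far
        short penalty = 8;  // e.g. a stone-pickaxe penalty (made-up value)
        short buggy = (short) (damage - penalty); // 22: the tool was repaired
        short fixed = (short) (damage + penalty); // 38: the tool wore out, as intended
        System.out.println("buggy=" + buggy + " fixed=" + fixed);
    }
}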
diff --git a/src/com/android/launcher2/LauncherModel.java b/src/com/android/launcher2/LauncherModel.java
index bc88a987..fc1a26d4 100644
--- a/src/com/android/launcher2/LauncherModel.java
+++ b/src/com/android/launcher2/LauncherModel.java
@@ -1,2174 +1,2176 @@
/*
* Copyright (C) 2008 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.android.launcher2;
import android.app.SearchManager;
import android.appwidget.AppWidgetManager;
import android.appwidget.AppWidgetProviderInfo;
import android.content.BroadcastReceiver;
import android.content.ComponentName;
import android.content.ContentProviderClient;
import android.content.ContentResolver;
import android.content.ContentValues;
import android.content.Context;
import android.content.Intent;
import android.content.Intent.ShortcutIconResource;
import android.content.pm.ActivityInfo;
import android.content.pm.PackageInfo;
import android.content.pm.PackageManager;
import android.content.pm.PackageManager.NameNotFoundException;
import android.content.pm.ResolveInfo;
import android.content.res.Configuration;
import android.content.res.Resources;
import android.database.Cursor;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.net.Uri;
import android.os.Environment;
import android.os.Handler;
import android.os.HandlerThread;
import android.os.Parcelable;
import android.os.Process;
import android.os.RemoteException;
import android.os.SystemClock;
import android.util.Log;
import com.android.launcher.R;
import com.android.launcher2.InstallWidgetReceiver.WidgetMimeTypeHandlerData;
import java.lang.ref.WeakReference;
import java.net.URISyntaxException;
import java.text.Collator;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.List;
/**
* Maintains in-memory state of the Launcher. It is expected that there should be only one
* LauncherModel object held in a static. Also provide APIs for updating the database state
* for the Launcher.
*/
public class LauncherModel extends BroadcastReceiver {
static final boolean DEBUG_LOADERS = false;
static final String TAG = "Launcher.Model";
private static final int ITEMS_CHUNK = 6; // batch size for the workspace icons
private final boolean mAppsCanBeOnExternalStorage;
private int mBatchSize; // 0 is all apps at once
private int mAllAppsLoadDelay; // milliseconds between batches
private final LauncherApplication mApp;
private final Object mLock = new Object();
private DeferredHandler mHandler = new DeferredHandler();
private LoaderTask mLoaderTask;
private static final HandlerThread sWorkerThread = new HandlerThread("launcher-loader");
static {
sWorkerThread.start();
}
private static final Handler sWorker = new Handler(sWorkerThread.getLooper());
// We start off with everything not loaded. After that, we assume that
// our monitoring of the package manager provides all updates and we never
// need to do a requery. These are only ever touched from the loader thread.
private boolean mWorkspaceLoaded;
private boolean mAllAppsLoaded;
private WeakReference<Callbacks> mCallbacks;
// < only access in worker thread >
private AllAppsList mAllAppsList;
// sItemsIdMap maps *all* the ItemInfos (shortcuts, folders, and widgets) created by
// LauncherModel to their ids
static final HashMap<Long, ItemInfo> sItemsIdMap = new HashMap<Long, ItemInfo>();
// sItems is passed to bindItems, which expects a list of all folders and shortcuts created by
// LauncherModel that are directly on the home screen (however, no widgets or shortcuts
// within folders).
static final ArrayList<ItemInfo> sWorkspaceItems = new ArrayList<ItemInfo>();
// sAppWidgets is all LauncherAppWidgetInfo created by LauncherModel. Passed to bindAppWidget()
static final ArrayList<LauncherAppWidgetInfo> sAppWidgets =
new ArrayList<LauncherAppWidgetInfo>();
// sFolders is all FolderInfos created by LauncherModel. Passed to bindFolders()
static final HashMap<Long, FolderInfo> sFolders = new HashMap<Long, FolderInfo>();
// sDbIconCache is the set of ItemInfos that need to have their icons updated in the database
static final HashMap<Object, byte[]> sDbIconCache = new HashMap<Object, byte[]>();
// </ only access in worker thread >
private IconCache mIconCache;
private Bitmap mDefaultIcon;
private static int mCellCountX;
private static int mCellCountY;
protected int mPreviousConfigMcc;
public interface Callbacks {
public boolean setLoadOnResume();
public int getCurrentWorkspaceScreen();
public void startBinding();
public void bindItems(ArrayList<ItemInfo> shortcuts, int start, int end);
public void bindFolders(HashMap<Long,FolderInfo> folders);
public void finishBindingItems();
public void bindAppWidget(LauncherAppWidgetInfo info);
public void bindAllApplications(ArrayList<ApplicationInfo> apps);
public void bindAppsAdded(ArrayList<ApplicationInfo> apps);
public void bindAppsUpdated(ArrayList<ApplicationInfo> apps);
public void bindAppsRemoved(ArrayList<ApplicationInfo> apps, boolean permanent);
public void bindPackagesUpdated();
public boolean isAllAppsVisible();
public boolean isAllAppsButtonRank(int rank);
public void bindSearchablesChanged();
}
LauncherModel(LauncherApplication app, IconCache iconCache) {
mAppsCanBeOnExternalStorage = !Environment.isExternalStorageEmulated();
mApp = app;
mAllAppsList = new AllAppsList(iconCache);
mIconCache = iconCache;
mDefaultIcon = Utilities.createIconBitmap(
mIconCache.getFullResDefaultActivityIcon(), app);
final Resources res = app.getResources();
mAllAppsLoadDelay = res.getInteger(R.integer.config_allAppsBatchLoadDelay);
mBatchSize = res.getInteger(R.integer.config_allAppsBatchSize);
Configuration config = res.getConfiguration();
mPreviousConfigMcc = config.mcc;
}
public Bitmap getFallbackIcon() {
return Bitmap.createBitmap(mDefaultIcon);
}
public void unbindWorkspaceItems() {
sWorker.post(new Runnable() {
@Override
public void run() {
unbindWorkspaceItemsOnMainThread();
}
});
}
/** Unbinds all the sWorkspaceItems on the main thread, and return a copy of sWorkspaceItems
* that is save to reference from the main thread. */
private ArrayList<ItemInfo> unbindWorkspaceItemsOnMainThread() {
// Ensure that we don't use the same workspace items data structure on the main thread
// by making a copy of workspace items first.
final ArrayList<ItemInfo> workspaceItems = new ArrayList<ItemInfo>(sWorkspaceItems);
final ArrayList<ItemInfo> appWidgets = new ArrayList<ItemInfo>(sAppWidgets);
mHandler.post(new Runnable() {
@Override
public void run() {
for (ItemInfo item : workspaceItems) {
item.unbind();
}
for (ItemInfo item : appWidgets) {
item.unbind();
}
}
});
return workspaceItems;
}
/**
* Adds an item to the DB if it was not created previously, or move it to a new
* <container, screen, cellX, cellY>
*/
static void addOrMoveItemInDatabase(Context context, ItemInfo item, long container,
int screen, int cellX, int cellY) {
if (item.container == ItemInfo.NO_ID) {
// From all apps
addItemToDatabase(context, item, container, screen, cellX, cellY, false);
} else {
// From somewhere else
moveItemInDatabase(context, item, container, screen, cellX, cellY);
}
}
static void updateItemInDatabaseHelper(Context context, final ContentValues values,
final ItemInfo item, final String callingFunction) {
final long itemId = item.id;
final Uri uri = LauncherSettings.Favorites.getContentUri(itemId, false);
final ContentResolver cr = context.getContentResolver();
Runnable r = new Runnable() {
public void run() {
cr.update(uri, values, null, null);
ItemInfo modelItem = sItemsIdMap.get(itemId);
if (item != modelItem) {
// the modelItem needs to match up perfectly with item if our model is to be
// consistent with the database-- for now, just require modelItem == item
String msg = "item: " + ((item != null) ? item.toString() : "null") +
"modelItem: " + ((modelItem != null) ? modelItem.toString() : "null") +
"Error: ItemInfo passed to " + callingFunction + " doesn't match original";
throw new RuntimeException(msg);
}
// Items are added/removed from the corresponding FolderInfo elsewhere, such
// as in Workspace.onDrop. Here, we just add/remove them from the list of items
// that are on the desktop, as appropriate
if (modelItem.container == LauncherSettings.Favorites.CONTAINER_DESKTOP ||
modelItem.container == LauncherSettings.Favorites.CONTAINER_HOTSEAT) {
if (!sWorkspaceItems.contains(modelItem)) {
sWorkspaceItems.add(modelItem);
}
} else {
sWorkspaceItems.remove(modelItem);
}
}
};
if (sWorkerThread.getThreadId() == Process.myTid()) {
r.run();
} else {
sWorker.post(r);
}
}
/**
* Move an item in the DB to a new <container, screen, cellX, cellY>
*/
static void moveItemInDatabase(Context context, final ItemInfo item, final long container,
final int screen, final int cellX, final int cellY) {
item.container = container;
item.cellX = cellX;
item.cellY = cellY;
// We store hotseat items in canonical form which is this orientation invariant position
// in the hotseat
if (context instanceof Launcher && screen < 0 &&
container == LauncherSettings.Favorites.CONTAINER_HOTSEAT) {
item.screen = ((Launcher) context).getHotseat().getOrderInHotseat(cellX, cellY);
} else {
item.screen = screen;
}
final ContentValues values = new ContentValues();
values.put(LauncherSettings.Favorites.CONTAINER, item.container);
values.put(LauncherSettings.Favorites.CELLX, item.cellX);
values.put(LauncherSettings.Favorites.CELLY, item.cellY);
values.put(LauncherSettings.Favorites.SCREEN, item.screen);
updateItemInDatabaseHelper(context, values, item, "moveItemInDatabase");
}
/**
* Move and/or resize item in the DB to a new <container, screen, cellX, cellY, spanX, spanY>
*/
static void modifyItemInDatabase(Context context, final ItemInfo item, final long container,
final int screen, final int cellX, final int cellY, final int spanX, final int spanY) {
item.container = container;
item.cellX = cellX;
item.cellY = cellY;
item.spanX = spanX;
item.spanY = spanY;
// We store hotseat items in canonical form which is this orientation invariant position
// in the hotseat
if (context instanceof Launcher && screen < 0 &&
container == LauncherSettings.Favorites.CONTAINER_HOTSEAT) {
item.screen = ((Launcher) context).getHotseat().getOrderInHotseat(cellX, cellY);
} else {
item.screen = screen;
}
final ContentValues values = new ContentValues();
values.put(LauncherSettings.Favorites.CONTAINER, item.container);
values.put(LauncherSettings.Favorites.CELLX, item.cellX);
values.put(LauncherSettings.Favorites.CELLY, item.cellY);
values.put(LauncherSettings.Favorites.SPANX, item.spanX);
values.put(LauncherSettings.Favorites.SPANY, item.spanY);
values.put(LauncherSettings.Favorites.SCREEN, item.screen);
updateItemInDatabaseHelper(context, values, item, "moveItemInDatabase");
}
/**
* Update an item to the database in a specified container.
*/
static void updateItemInDatabase(Context context, final ItemInfo item) {
final ContentValues values = new ContentValues();
item.onAddToDatabase(values);
item.updateValuesWithCoordinates(values, item.cellX, item.cellY);
updateItemInDatabaseHelper(context, values, item, "updateItemInDatabase");
}
/**
* Returns true if the shortcuts already exists in the database.
* we identify a shortcut by its title and intent.
*/
static boolean shortcutExists(Context context, String title, Intent intent) {
final ContentResolver cr = context.getContentResolver();
Cursor c = cr.query(LauncherSettings.Favorites.CONTENT_URI,
new String[] { "title", "intent" }, "title=? and intent=?",
new String[] { title, intent.toUri(0) }, null);
boolean result = false;
try {
result = c.moveToFirst();
} finally {
c.close();
}
return result;
}
/**
* Returns an ItemInfo array containing all the items in the LauncherModel.
* The ItemInfo.id is not set through this function.
*/
static ArrayList<ItemInfo> getItemsInLocalCoordinates(Context context) {
ArrayList<ItemInfo> items = new ArrayList<ItemInfo>();
final ContentResolver cr = context.getContentResolver();
Cursor c = cr.query(LauncherSettings.Favorites.CONTENT_URI, new String[] {
LauncherSettings.Favorites.ITEM_TYPE, LauncherSettings.Favorites.CONTAINER,
LauncherSettings.Favorites.SCREEN, LauncherSettings.Favorites.CELLX, LauncherSettings.Favorites.CELLY,
LauncherSettings.Favorites.SPANX, LauncherSettings.Favorites.SPANY }, null, null, null);
final int itemTypeIndex = c.getColumnIndexOrThrow(LauncherSettings.Favorites.ITEM_TYPE);
final int containerIndex = c.getColumnIndexOrThrow(LauncherSettings.Favorites.CONTAINER);
final int screenIndex = c.getColumnIndexOrThrow(LauncherSettings.Favorites.SCREEN);
final int cellXIndex = c.getColumnIndexOrThrow(LauncherSettings.Favorites.CELLX);
final int cellYIndex = c.getColumnIndexOrThrow(LauncherSettings.Favorites.CELLY);
final int spanXIndex = c.getColumnIndexOrThrow(LauncherSettings.Favorites.SPANX);
final int spanYIndex = c.getColumnIndexOrThrow(LauncherSettings.Favorites.SPANY);
try {
while (c.moveToNext()) {
ItemInfo item = new ItemInfo();
item.cellX = c.getInt(cellXIndex);
item.cellY = c.getInt(cellYIndex);
item.spanX = c.getInt(spanXIndex);
item.spanY = c.getInt(spanYIndex);
item.container = c.getInt(containerIndex);
item.itemType = c.getInt(itemTypeIndex);
item.screen = c.getInt(screenIndex);
items.add(item);
}
} catch (Exception e) {
items.clear();
} finally {
c.close();
}
return items;
}
/**
* Find a folder in the db, creating the FolderInfo if necessary, and adding it to folderList.
*/
FolderInfo getFolderById(Context context, HashMap<Long,FolderInfo> folderList, long id) {
final ContentResolver cr = context.getContentResolver();
Cursor c = cr.query(LauncherSettings.Favorites.CONTENT_URI, null,
"_id=? and (itemType=? or itemType=?)",
new String[] { String.valueOf(id),
String.valueOf(LauncherSettings.Favorites.ITEM_TYPE_FOLDER)}, null);
try {
if (c.moveToFirst()) {
final int itemTypeIndex = c.getColumnIndexOrThrow(LauncherSettings.Favorites.ITEM_TYPE);
final int titleIndex = c.getColumnIndexOrThrow(LauncherSettings.Favorites.TITLE);
final int containerIndex = c.getColumnIndexOrThrow(LauncherSettings.Favorites.CONTAINER);
final int screenIndex = c.getColumnIndexOrThrow(LauncherSettings.Favorites.SCREEN);
final int cellXIndex = c.getColumnIndexOrThrow(LauncherSettings.Favorites.CELLX);
final int cellYIndex = c.getColumnIndexOrThrow(LauncherSettings.Favorites.CELLY);
FolderInfo folderInfo = null;
switch (c.getInt(itemTypeIndex)) {
case LauncherSettings.Favorites.ITEM_TYPE_FOLDER:
folderInfo = findOrMakeFolder(folderList, id);
break;
}
folderInfo.title = c.getString(titleIndex);
folderInfo.id = id;
folderInfo.container = c.getInt(containerIndex);
folderInfo.screen = c.getInt(screenIndex);
folderInfo.cellX = c.getInt(cellXIndex);
folderInfo.cellY = c.getInt(cellYIndex);
return folderInfo;
}
} finally {
c.close();
}
return null;
}
/**
* Add an item to the database in a specified container. Sets the container, screen, cellX and
* cellY fields of the item. Also assigns an ID to the item.
*/
static void addItemToDatabase(Context context, final ItemInfo item, final long container,
final int screen, final int cellX, final int cellY, final boolean notify) {
item.container = container;
item.cellX = cellX;
item.cellY = cellY;
// We store hotseat items in canonical form which is this orientation invariant position
// in the hotseat
if (context instanceof Launcher && screen < 0 &&
container == LauncherSettings.Favorites.CONTAINER_HOTSEAT) {
item.screen = ((Launcher) context).getHotseat().getOrderInHotseat(cellX, cellY);
} else {
item.screen = screen;
}
final ContentValues values = new ContentValues();
final ContentResolver cr = context.getContentResolver();
item.onAddToDatabase(values);
LauncherApplication app = (LauncherApplication) context.getApplicationContext();
item.id = app.getLauncherProvider().generateNewId();
values.put(LauncherSettings.Favorites._ID, item.id);
item.updateValuesWithCoordinates(values, item.cellX, item.cellY);
Runnable r = new Runnable() {
public void run() {
cr.insert(notify ? LauncherSettings.Favorites.CONTENT_URI :
LauncherSettings.Favorites.CONTENT_URI_NO_NOTIFICATION, values);
if (sItemsIdMap.containsKey(item.id)) {
// we should not be adding new items in the db with the same id
throw new RuntimeException("Error: ItemInfo id (" + item.id + ") passed to " +
"addItemToDatabase already exists." + item.toString());
}
sItemsIdMap.put(item.id, item);
switch (item.itemType) {
case LauncherSettings.Favorites.ITEM_TYPE_FOLDER:
sFolders.put(item.id, (FolderInfo) item);
// Fall through
case LauncherSettings.Favorites.ITEM_TYPE_APPLICATION:
case LauncherSettings.Favorites.ITEM_TYPE_SHORTCUT:
if (item.container == LauncherSettings.Favorites.CONTAINER_DESKTOP ||
item.container == LauncherSettings.Favorites.CONTAINER_HOTSEAT) {
sWorkspaceItems.add(item);
}
break;
case LauncherSettings.Favorites.ITEM_TYPE_APPWIDGET:
sAppWidgets.add((LauncherAppWidgetInfo) item);
break;
}
}
};
if (sWorkerThread.getThreadId() == Process.myTid()) {
r.run();
} else {
sWorker.post(r);
}
}
/**
* Creates a new unique child id, for a given cell span across all layouts.
*/
static int getCellLayoutChildId(
long container, int screen, int localCellX, int localCellY, int spanX, int spanY) {
return (((int) container & 0xFF) << 24)
| (screen & 0xFF) << 16 | (localCellX & 0xFF) << 8 | (localCellY & 0xFF);
}
static int getCellCountX() {
return mCellCountX;
}
static int getCellCountY() {
return mCellCountY;
}
/**
* Updates the model orientation helper to take into account the current layout dimensions
* when performing local/canonical coordinate transformations.
*/
static void updateWorkspaceLayoutCells(int shortAxisCellCount, int longAxisCellCount) {
mCellCountX = shortAxisCellCount;
mCellCountY = longAxisCellCount;
}
/**
* Removes the specified item from the database
* @param context
* @param item
*/
static void deleteItemFromDatabase(Context context, final ItemInfo item) {
final ContentResolver cr = context.getContentResolver();
final Uri uriToDelete = LauncherSettings.Favorites.getContentUri(item.id, false);
Runnable r = new Runnable() {
public void run() {
cr.delete(uriToDelete, null, null);
switch (item.itemType) {
case LauncherSettings.Favorites.ITEM_TYPE_FOLDER:
sFolders.remove(item.id);
sWorkspaceItems.remove(item);
break;
case LauncherSettings.Favorites.ITEM_TYPE_APPLICATION:
case LauncherSettings.Favorites.ITEM_TYPE_SHORTCUT:
sWorkspaceItems.remove(item);
break;
case LauncherSettings.Favorites.ITEM_TYPE_APPWIDGET:
sAppWidgets.remove((LauncherAppWidgetInfo) item);
break;
}
sItemsIdMap.remove(item.id);
sDbIconCache.remove(item);
}
};
if (sWorkerThread.getThreadId() == Process.myTid()) {
r.run();
} else {
sWorker.post(r);
}
}
/**
* Remove the contents of the specified folder from the database
*/
static void deleteFolderContentsFromDatabase(Context context, final FolderInfo info) {
final ContentResolver cr = context.getContentResolver();
Runnable r = new Runnable() {
public void run() {
cr.delete(LauncherSettings.Favorites.getContentUri(info.id, false), null, null);
sItemsIdMap.remove(info.id);
sFolders.remove(info.id);
sDbIconCache.remove(info);
sWorkspaceItems.remove(info);
cr.delete(LauncherSettings.Favorites.CONTENT_URI_NO_NOTIFICATION,
LauncherSettings.Favorites.CONTAINER + "=" + info.id, null);
for (ItemInfo childInfo : info.contents) {
sItemsIdMap.remove(childInfo.id);
sDbIconCache.remove(childInfo);
}
}
};
if (sWorkerThread.getThreadId() == Process.myTid()) {
r.run();
} else {
sWorker.post(r);
}
}
/**
* Set this as the current Launcher activity object for the loader.
*/
public void initialize(Callbacks callbacks) {
synchronized (mLock) {
mCallbacks = new WeakReference<Callbacks>(callbacks);
}
}
/**
* Call from the handler for ACTION_PACKAGE_ADDED, ACTION_PACKAGE_REMOVED and
* ACTION_PACKAGE_CHANGED.
*/
@Override
public void onReceive(Context context, Intent intent) {
if (DEBUG_LOADERS) Log.d(TAG, "onReceive intent=" + intent);
final String action = intent.getAction();
if (Intent.ACTION_PACKAGE_CHANGED.equals(action)
|| Intent.ACTION_PACKAGE_REMOVED.equals(action)
|| Intent.ACTION_PACKAGE_ADDED.equals(action)) {
final String packageName = intent.getData().getSchemeSpecificPart();
final boolean replacing = intent.getBooleanExtra(Intent.EXTRA_REPLACING, false);
int op = PackageUpdatedTask.OP_NONE;
if (packageName == null || packageName.length() == 0) {
// they sent us a bad intent
return;
}
if (Intent.ACTION_PACKAGE_CHANGED.equals(action)) {
op = PackageUpdatedTask.OP_UPDATE;
} else if (Intent.ACTION_PACKAGE_REMOVED.equals(action)) {
if (!replacing) {
op = PackageUpdatedTask.OP_REMOVE;
}
// else, we are replacing the package, so a PACKAGE_ADDED will be sent
// later, we will update the package at this time
} else if (Intent.ACTION_PACKAGE_ADDED.equals(action)) {
if (!replacing) {
op = PackageUpdatedTask.OP_ADD;
} else {
op = PackageUpdatedTask.OP_UPDATE;
}
}
if (op != PackageUpdatedTask.OP_NONE) {
enqueuePackageUpdated(new PackageUpdatedTask(op, new String[] { packageName }));
}
} else if (Intent.ACTION_EXTERNAL_APPLICATIONS_AVAILABLE.equals(action)) {
// First, schedule to add these apps back in.
String[] packages = intent.getStringArrayExtra(Intent.EXTRA_CHANGED_PACKAGE_LIST);
enqueuePackageUpdated(new PackageUpdatedTask(PackageUpdatedTask.OP_ADD, packages));
// Then, rebind everything.
startLoaderFromBackground();
} else if (Intent.ACTION_EXTERNAL_APPLICATIONS_UNAVAILABLE.equals(action)) {
String[] packages = intent.getStringArrayExtra(Intent.EXTRA_CHANGED_PACKAGE_LIST);
enqueuePackageUpdated(new PackageUpdatedTask(
PackageUpdatedTask.OP_UNAVAILABLE, packages));
} else if (Intent.ACTION_LOCALE_CHANGED.equals(action)) {
// If we have changed locale we need to clear out the labels in all apps/workspace.
forceReload();
} else if (Intent.ACTION_CONFIGURATION_CHANGED.equals(action)) {
// Check if configuration change was an mcc/mnc change which would affect app resources
// and we would need to clear out the labels in all apps/workspace. Same handling as
// above for ACTION_LOCALE_CHANGED
Configuration currentConfig = context.getResources().getConfiguration();
if (mPreviousConfigMcc != currentConfig.mcc) {
Log.d(TAG, "Reload apps on config change. curr_mcc:"
+ currentConfig.mcc + " prevmcc:" + mPreviousConfigMcc);
forceReload();
}
// Update previousConfig
mPreviousConfigMcc = currentConfig.mcc;
} else if (SearchManager.INTENT_GLOBAL_SEARCH_ACTIVITY_CHANGED.equals(action) ||
SearchManager.INTENT_ACTION_SEARCHABLES_CHANGED.equals(action)) {
if (mCallbacks != null) {
Callbacks callbacks = mCallbacks.get();
if (callbacks != null) {
callbacks.bindSearchablesChanged();
}
}
}
}
private void forceReload() {
resetLoadedState(true, true);
// Do this here because if the launcher activity is running it will be restarted.
// If it's not running startLoaderFromBackground will merely tell it that it needs
// to reload.
startLoaderFromBackground();
}
public void resetLoadedState(boolean resetAllAppsLoaded, boolean resetWorkspaceLoaded) {
synchronized (mLock) {
// Stop any existing loaders first, so they don't set mAllAppsLoaded or
// mWorkspaceLoaded to true later
stopLoaderLocked();
if (resetAllAppsLoaded) mAllAppsLoaded = false;
if (resetWorkspaceLoaded) mWorkspaceLoaded = false;
}
}
/**
* When the launcher is in the background, it's possible for it to miss paired
* configuration changes. So whenever we trigger the loader from the background
* tell the launcher that it needs to re-run the loader when it comes back instead
* of doing it now.
*/
public void startLoaderFromBackground() {
boolean runLoader = false;
if (mCallbacks != null) {
Callbacks callbacks = mCallbacks.get();
if (callbacks != null) {
// Only actually run the loader if they're not paused.
if (!callbacks.setLoadOnResume()) {
runLoader = true;
}
}
}
if (runLoader) {
startLoader(false);
}
}
// If there is already a loader task running, tell it to stop.
// returns true if isLaunching() was true on the old task
private boolean stopLoaderLocked() {
boolean isLaunching = false;
LoaderTask oldTask = mLoaderTask;
if (oldTask != null) {
if (oldTask.isLaunching()) {
isLaunching = true;
}
oldTask.stopLocked();
}
return isLaunching;
}
public void startLoader(boolean isLaunching) {
synchronized (mLock) {
if (DEBUG_LOADERS) {
Log.d(TAG, "startLoader isLaunching=" + isLaunching);
}
// Don't bother to start the thread if we know it's not going to do anything
if (mCallbacks != null && mCallbacks.get() != null) {
// If there is already one running, tell it to stop.
// also, don't downgrade isLaunching if we're already running
isLaunching = isLaunching || stopLoaderLocked();
mLoaderTask = new LoaderTask(mApp, isLaunching);
sWorkerThread.setPriority(Thread.NORM_PRIORITY);
sWorker.post(mLoaderTask);
}
}
}
public void stopLoader() {
synchronized (mLock) {
if (mLoaderTask != null) {
mLoaderTask.stopLocked();
}
}
}
public boolean isAllAppsLoaded() {
return mAllAppsLoaded;
}
boolean isLoadingWorkspace() {
synchronized (mLock) {
if (mLoaderTask != null) {
return mLoaderTask.isLoadingWorkspace();
}
}
return false;
}
/**
* Runnable for the thread that loads the contents of the launcher:
* - workspace icons
* - widgets
* - all apps icons
*/
private class LoaderTask implements Runnable {
private Context mContext;
private Thread mWaitThread;
private boolean mIsLaunching;
private boolean mIsLoadingAndBindingWorkspace;
private boolean mStopped;
private boolean mLoadAndBindStepFinished;
private HashMap<Object, CharSequence> mLabelCache;
LoaderTask(Context context, boolean isLaunching) {
mContext = context;
mIsLaunching = isLaunching;
mLabelCache = new HashMap<Object, CharSequence>();
}
boolean isLaunching() {
return mIsLaunching;
}
boolean isLoadingWorkspace() {
return mIsLoadingAndBindingWorkspace;
}
private void loadAndBindWorkspace() {
mIsLoadingAndBindingWorkspace = true;
// Load the workspace
if (DEBUG_LOADERS) {
Log.d(TAG, "loadAndBindWorkspace mWorkspaceLoaded=" + mWorkspaceLoaded);
}
if (!mWorkspaceLoaded) {
loadWorkspace();
synchronized (LoaderTask.this) {
if (mStopped) {
return;
}
mWorkspaceLoaded = true;
}
}
// Bind the workspace
bindWorkspace();
}
private void waitForIdle() {
// Wait until either we're stopped or the other threads are done.
// This way we don't start loading all apps until the workspace has settled
// down.
synchronized (LoaderTask.this) {
final long workspaceWaitTime = DEBUG_LOADERS ? SystemClock.uptimeMillis() : 0;
mHandler.postIdle(new Runnable() {
public void run() {
synchronized (LoaderTask.this) {
mLoadAndBindStepFinished = true;
if (DEBUG_LOADERS) {
Log.d(TAG, "done with previous binding step");
}
LoaderTask.this.notify();
}
}
});
while (!mStopped && !mLoadAndBindStepFinished) {
try {
this.wait();
} catch (InterruptedException ex) {
// Ignore
}
}
if (DEBUG_LOADERS) {
Log.d(TAG, "waited "
+ (SystemClock.uptimeMillis()-workspaceWaitTime)
+ "ms for previous step to finish binding");
}
}
}
public void run() {
// Optimize for end-user experience: if the Launcher is up and running with the
// All Apps interface in the foreground, load All Apps first. Otherwise, load the
// workspace first (default).
final Callbacks cbk = mCallbacks.get();
final boolean loadWorkspaceFirst = cbk != null ? (!cbk.isAllAppsVisible()) : true;
keep_running: {
// Elevate priority when Home launches for the first time to avoid
// starving at boot time. Staring at a blank home is not cool.
synchronized (mLock) {
if (DEBUG_LOADERS) Log.d(TAG, "Setting thread priority to " +
(mIsLaunching ? "DEFAULT" : "BACKGROUND"));
android.os.Process.setThreadPriority(mIsLaunching
? Process.THREAD_PRIORITY_DEFAULT : Process.THREAD_PRIORITY_BACKGROUND);
}
if (loadWorkspaceFirst) {
if (DEBUG_LOADERS) Log.d(TAG, "step 1: loading workspace");
loadAndBindWorkspace();
} else {
if (DEBUG_LOADERS) Log.d(TAG, "step 1: special: loading all apps");
loadAndBindAllApps();
}
if (mStopped) {
break keep_running;
}
// Whew! Hard work done. Slow us down, and wait until the UI thread has
// settled down.
synchronized (mLock) {
if (mIsLaunching) {
if (DEBUG_LOADERS) Log.d(TAG, "Setting thread priority to BACKGROUND");
android.os.Process.setThreadPriority(Process.THREAD_PRIORITY_BACKGROUND);
}
}
waitForIdle();
// second step
if (loadWorkspaceFirst) {
if (DEBUG_LOADERS) Log.d(TAG, "step 2: loading all apps");
loadAndBindAllApps();
} else {
if (DEBUG_LOADERS) Log.d(TAG, "step 2: special: loading workspace");
loadAndBindWorkspace();
}
// Restore the default thread priority after we are done loading items
synchronized (mLock) {
android.os.Process.setThreadPriority(Process.THREAD_PRIORITY_DEFAULT);
}
}
// Update the saved icons if necessary
if (DEBUG_LOADERS) Log.d(TAG, "Comparing loaded icons to database icons");
for (Object key : sDbIconCache.keySet()) {
updateSavedIcon(mContext, (ShortcutInfo) key, sDbIconCache.get(key));
}
sDbIconCache.clear();
// Clear out this reference, otherwise we end up holding it until all of the
// callback runnables are done.
mContext = null;
synchronized (mLock) {
// If we are still the last one to be scheduled, remove ourselves.
if (mLoaderTask == this) {
mLoaderTask = null;
}
}
}
public void stopLocked() {
synchronized (LoaderTask.this) {
mStopped = true;
this.notify();
}
}
/**
* Gets the callbacks object. If we've been stopped, or if the launcher object
* has somehow been garbage collected, return null instead. Pass in the Callbacks
* object that was around when the deferred message was scheduled, and if there's
* a new Callbacks object around then also return null. This will save us from
* calling onto it with data that will be ignored.
*/
Callbacks tryGetCallbacks(Callbacks oldCallbacks) {
synchronized (mLock) {
if (mStopped) {
return null;
}
if (mCallbacks == null) {
return null;
}
final Callbacks callbacks = mCallbacks.get();
if (callbacks != oldCallbacks) {
return null;
}
if (callbacks == null) {
Log.w(TAG, "no mCallbacks");
return null;
}
return callbacks;
}
}
// check & update map of what's occupied; used to discard overlapping/invalid items
private boolean checkItemPlacement(ItemInfo occupied[][][], ItemInfo item) {
int containerIndex = item.screen;
if (item.container == LauncherSettings.Favorites.CONTAINER_HOTSEAT) {
// Return early if we detect that an item is under the hotseat button
if (mCallbacks == null || mCallbacks.get().isAllAppsButtonRank(item.screen)) {
return false;
}
// We use the last index to refer to the hotseat and the screen as the rank, so
// test and update the occupied state accordingly
if (occupied[Launcher.SCREEN_COUNT][item.screen][0] != null) {
Log.e(TAG, "Error loading shortcut into hotseat " + item
+ " into position (" + item.screen + ":" + item.cellX + "," + item.cellY
+ ") occupied by " + occupied[Launcher.SCREEN_COUNT][item.screen][0]);
return false;
} else {
occupied[Launcher.SCREEN_COUNT][item.screen][0] = item;
return true;
}
} else if (item.container != LauncherSettings.Favorites.CONTAINER_DESKTOP) {
// Skip further checking if it is not the hotseat or workspace container
return true;
}
// Check if any workspace icons overlap with each other
for (int x = item.cellX; x < (item.cellX+item.spanX); x++) {
for (int y = item.cellY; y < (item.cellY+item.spanY); y++) {
if (occupied[containerIndex][x][y] != null) {
Log.e(TAG, "Error loading shortcut " + item
+ " into cell (" + containerIndex + "-" + item.screen + ":"
+ x + "," + y
+ ") occupied by "
+ occupied[containerIndex][x][y]);
return false;
}
}
}
for (int x = item.cellX; x < (item.cellX+item.spanX); x++) {
for (int y = item.cellY; y < (item.cellY+item.spanY); y++) {
occupied[containerIndex][x][y] = item;
}
}
return true;
}
private void loadWorkspace() {
final long t = DEBUG_LOADERS ? SystemClock.uptimeMillis() : 0;
final Context context = mContext;
final ContentResolver contentResolver = context.getContentResolver();
final PackageManager manager = context.getPackageManager();
final AppWidgetManager widgets = AppWidgetManager.getInstance(context);
final boolean isSafeMode = manager.isSafeMode();
// Make sure the default workspace is loaded, if needed
mApp.getLauncherProvider().loadDefaultFavoritesIfNecessary();
sWorkspaceItems.clear();
sAppWidgets.clear();
sFolders.clear();
sItemsIdMap.clear();
sDbIconCache.clear();
final ArrayList<Long> itemsToRemove = new ArrayList<Long>();
final Cursor c = contentResolver.query(
LauncherSettings.Favorites.CONTENT_URI, null, null, null, null);
// +1 for the hotseat (it can be larger than the workspace)
// Load workspace in reverse order to ensure that latest items are loaded first (and
// before any earlier duplicates)
final ItemInfo occupied[][][] =
new ItemInfo[Launcher.SCREEN_COUNT + 1][mCellCountX + 1][mCellCountY + 1];
try {
final int idIndex = c.getColumnIndexOrThrow(LauncherSettings.Favorites._ID);
final int intentIndex = c.getColumnIndexOrThrow
(LauncherSettings.Favorites.INTENT);
final int titleIndex = c.getColumnIndexOrThrow
(LauncherSettings.Favorites.TITLE);
final int iconTypeIndex = c.getColumnIndexOrThrow(
LauncherSettings.Favorites.ICON_TYPE);
final int iconIndex = c.getColumnIndexOrThrow(LauncherSettings.Favorites.ICON);
final int iconPackageIndex = c.getColumnIndexOrThrow(
LauncherSettings.Favorites.ICON_PACKAGE);
final int iconResourceIndex = c.getColumnIndexOrThrow(
LauncherSettings.Favorites.ICON_RESOURCE);
final int containerIndex = c.getColumnIndexOrThrow(
LauncherSettings.Favorites.CONTAINER);
final int itemTypeIndex = c.getColumnIndexOrThrow(
LauncherSettings.Favorites.ITEM_TYPE);
final int appWidgetIdIndex = c.getColumnIndexOrThrow(
LauncherSettings.Favorites.APPWIDGET_ID);
final int screenIndex = c.getColumnIndexOrThrow(
LauncherSettings.Favorites.SCREEN);
final int cellXIndex = c.getColumnIndexOrThrow
(LauncherSettings.Favorites.CELLX);
final int cellYIndex = c.getColumnIndexOrThrow
(LauncherSettings.Favorites.CELLY);
final int spanXIndex = c.getColumnIndexOrThrow
(LauncherSettings.Favorites.SPANX);
final int spanYIndex = c.getColumnIndexOrThrow(
LauncherSettings.Favorites.SPANY);
//final int uriIndex = c.getColumnIndexOrThrow(LauncherSettings.Favorites.URI);
//final int displayModeIndex = c.getColumnIndexOrThrow(
// LauncherSettings.Favorites.DISPLAY_MODE);
ShortcutInfo info;
String intentDescription;
LauncherAppWidgetInfo appWidgetInfo;
int container;
long id;
Intent intent;
while (!mStopped && c.moveToNext()) {
try {
int itemType = c.getInt(itemTypeIndex);
switch (itemType) {
case LauncherSettings.Favorites.ITEM_TYPE_APPLICATION:
case LauncherSettings.Favorites.ITEM_TYPE_SHORTCUT:
intentDescription = c.getString(intentIndex);
try {
intent = Intent.parseUri(intentDescription, 0);
} catch (URISyntaxException e) {
continue;
}
if (itemType == LauncherSettings.Favorites.ITEM_TYPE_APPLICATION) {
info = getShortcutInfo(manager, intent, context, c, iconIndex,
titleIndex, mLabelCache);
} else {
info = getShortcutInfo(c, context, iconTypeIndex,
iconPackageIndex, iconResourceIndex, iconIndex,
titleIndex);
// App shortcuts that used to be automatically added to Launcher
// didn't always have the correct intent flags set, so do that here
- if (intent.getAction().equals(Intent.ACTION_MAIN) &&
+ if (intent.getAction() != null &&
+ intent.getCategories() != null &&
+ intent.getAction().equals(Intent.ACTION_MAIN) &&
intent.getCategories().contains(Intent.CATEGORY_LAUNCHER)) {
intent.addFlags(
Intent.FLAG_ACTIVITY_NEW_TASK |
Intent.FLAG_ACTIVITY_RESET_TASK_IF_NEEDED);
}
}
if (info != null) {
info.intent = intent;
info.id = c.getLong(idIndex);
container = c.getInt(containerIndex);
info.container = container;
info.screen = c.getInt(screenIndex);
info.cellX = c.getInt(cellXIndex);
info.cellY = c.getInt(cellYIndex);
// check & update map of what's occupied
if (!checkItemPlacement(occupied, info)) {
break;
}
switch (container) {
case LauncherSettings.Favorites.CONTAINER_DESKTOP:
case LauncherSettings.Favorites.CONTAINER_HOTSEAT:
sWorkspaceItems.add(info);
break;
default:
// Item is in a user folder
FolderInfo folderInfo =
findOrMakeFolder(sFolders, container);
folderInfo.add(info);
break;
}
sItemsIdMap.put(info.id, info);
// now that we've loaded everything, re-save it with the
// icon in case it disappears somehow.
queueIconToBeChecked(sDbIconCache, info, c, iconIndex);
} else {
// Failed to load the shortcut, probably because the
// activity manager couldn't resolve it (maybe the app
// was uninstalled), or the db row was somehow screwed up.
// Delete it.
id = c.getLong(idIndex);
Log.e(TAG, "Error loading shortcut " + id + ", removing it");
contentResolver.delete(LauncherSettings.Favorites.getContentUri(
id, false), null, null);
}
break;
case LauncherSettings.Favorites.ITEM_TYPE_FOLDER:
id = c.getLong(idIndex);
FolderInfo folderInfo = findOrMakeFolder(sFolders, id);
folderInfo.title = c.getString(titleIndex);
folderInfo.id = id;
container = c.getInt(containerIndex);
folderInfo.container = container;
folderInfo.screen = c.getInt(screenIndex);
folderInfo.cellX = c.getInt(cellXIndex);
folderInfo.cellY = c.getInt(cellYIndex);
// check & update map of what's occupied
if (!checkItemPlacement(occupied, folderInfo)) {
break;
}
switch (container) {
case LauncherSettings.Favorites.CONTAINER_DESKTOP:
case LauncherSettings.Favorites.CONTAINER_HOTSEAT:
sWorkspaceItems.add(folderInfo);
break;
}
sItemsIdMap.put(folderInfo.id, folderInfo);
sFolders.put(folderInfo.id, folderInfo);
break;
case LauncherSettings.Favorites.ITEM_TYPE_APPWIDGET:
// Read all Launcher-specific widget details
int appWidgetId = c.getInt(appWidgetIdIndex);
id = c.getLong(idIndex);
final AppWidgetProviderInfo provider =
widgets.getAppWidgetInfo(appWidgetId);
if (!isSafeMode && (provider == null || provider.provider == null ||
provider.provider.getPackageName() == null)) {
String log = "Deleting widget that isn't installed anymore: id="
+ id + " appWidgetId=" + appWidgetId;
Log.e(TAG, log);
Launcher.sDumpLogs.add(log);
itemsToRemove.add(id);
} else {
appWidgetInfo = new LauncherAppWidgetInfo(appWidgetId,
provider.provider);
appWidgetInfo.id = id;
appWidgetInfo.screen = c.getInt(screenIndex);
appWidgetInfo.cellX = c.getInt(cellXIndex);
appWidgetInfo.cellY = c.getInt(cellYIndex);
appWidgetInfo.spanX = c.getInt(spanXIndex);
appWidgetInfo.spanY = c.getInt(spanYIndex);
int[] minSpan = Launcher.getMinSpanForWidget(context, provider);
appWidgetInfo.minSpanX = minSpan[0];
appWidgetInfo.minSpanY = minSpan[1];
container = c.getInt(containerIndex);
if (container != LauncherSettings.Favorites.CONTAINER_DESKTOP &&
container != LauncherSettings.Favorites.CONTAINER_HOTSEAT) {
Log.e(TAG, "Widget found where container "
+ "!= CONTAINER_DESKTOP nor CONTAINER_HOTSEAT - ignoring!");
continue;
}
appWidgetInfo.container = c.getInt(containerIndex);
// check & update map of what's occupied
if (!checkItemPlacement(occupied, appWidgetInfo)) {
break;
}
sItemsIdMap.put(appWidgetInfo.id, appWidgetInfo);
sAppWidgets.add(appWidgetInfo);
}
break;
}
} catch (Exception e) {
Log.w(TAG, "Desktop items loading interrupted:", e);
}
}
} finally {
c.close();
}
if (itemsToRemove.size() > 0) {
ContentProviderClient client = contentResolver.acquireContentProviderClient(
LauncherSettings.Favorites.CONTENT_URI);
// Remove dead items
for (long id : itemsToRemove) {
if (DEBUG_LOADERS) {
Log.d(TAG, "Removed id = " + id);
}
// Don't notify content observers
try {
client.delete(LauncherSettings.Favorites.getContentUri(id, false),
null, null);
} catch (RemoteException e) {
Log.w(TAG, "Could not remove id = " + id);
}
}
}
if (DEBUG_LOADERS) {
Log.d(TAG, "loaded workspace in " + (SystemClock.uptimeMillis()-t) + "ms");
Log.d(TAG, "workspace layout: ");
for (int y = 0; y < mCellCountY; y++) {
String line = "";
for (int s = 0; s < Launcher.SCREEN_COUNT; s++) {
if (s > 0) {
line += " | ";
}
for (int x = 0; x < mCellCountX; x++) {
line += ((occupied[s][x][y] != null) ? "#" : ".");
}
}
Log.d(TAG, "[ " + line + " ]");
}
}
}
/**
* Bind the items loaded from the database to the launcher UI.
*/
private void bindWorkspace() {
final long t = SystemClock.uptimeMillis();
// Don't use these two variables in any of the callback runnables.
// Otherwise we hold a reference to them.
final Callbacks oldCallbacks = mCallbacks.get();
if (oldCallbacks == null) {
// This launcher has exited and nobody bothered to tell us. Just bail.
Log.w(TAG, "LoaderTask running with no launcher");
return;
}
// Get the list of workspace items to load and unbind the existing ShortcutInfos
// before we call startBinding() below.
final int currentScreen = oldCallbacks.getCurrentWorkspaceScreen();
final ArrayList<ItemInfo> tmpWorkspaceItems = unbindWorkspaceItemsOnMainThread();
// Order the items for loading as follows: current workspace, hotseat, everything else
Collections.sort(tmpWorkspaceItems, new Comparator<ItemInfo>() {
@Override
public int compare(ItemInfo lhs, ItemInfo rhs) {
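// Linearize (container, screen, cellY, cellX) into a single key so items
// compare by container first, then screen, then reading order within a screen.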
int cellCountX = LauncherModel.getCellCountX();
int cellCountY = LauncherModel.getCellCountY();
int screenOffset = cellCountX * cellCountY;
int containerOffset = screenOffset * (Launcher.SCREEN_COUNT + 1); // +1 hotseat
long lr = (lhs.container * containerOffset + lhs.screen * screenOffset +
lhs.cellY * cellCountX + lhs.cellX);
long rr = (rhs.container * containerOffset + rhs.screen * screenOffset +
rhs.cellY * cellCountX + rhs.cellX);
return (int) (lr - rr);
}
});
// Precondition: the items are ordered by page, screen
final ArrayList<ItemInfo> workspaceItems = new ArrayList<ItemInfo>();
for (ItemInfo ii : tmpWorkspaceItems) {
// Prepend the current items, hotseat items, append everything else
if (ii.container == LauncherSettings.Favorites.CONTAINER_DESKTOP &&
ii.screen == currentScreen) {
workspaceItems.add(0, ii);
} else if (ii.container == LauncherSettings.Favorites.CONTAINER_HOTSEAT) {
workspaceItems.add(0, ii);
} else {
workspaceItems.add(ii);
}
}
// Tell the workspace that we're about to start firing items at it
mHandler.post(new Runnable() {
public void run() {
Callbacks callbacks = tryGetCallbacks(oldCallbacks);
if (callbacks != null) {
callbacks.startBinding();
}
}
});
// Add the items to the workspace.
int N = workspaceItems.size();
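// Post bindItems in ITEMS_CHUNK-sized slices so each Runnable on the UI
// thread handles at most ITEMS_CHUNK items.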
for (int i=0; i<N; i+=ITEMS_CHUNK) {
final int start = i;
final int chunkSize = (i+ITEMS_CHUNK <= N) ? ITEMS_CHUNK : (N-i);
mHandler.post(new Runnable() {
public void run() {
Callbacks callbacks = tryGetCallbacks(oldCallbacks);
if (callbacks != null) {
callbacks.bindItems(workspaceItems, start, start+chunkSize);
}
}
});
}
// Ensure that we don't use the same folders data structure on the main thread
final HashMap<Long, FolderInfo> folders = new HashMap<Long, FolderInfo>(sFolders);
mHandler.post(new Runnable() {
public void run() {
Callbacks callbacks = tryGetCallbacks(oldCallbacks);
if (callbacks != null) {
callbacks.bindFolders(folders);
}
}
});
// Wait until the queue goes empty.
mHandler.post(new Runnable() {
public void run() {
if (DEBUG_LOADERS) {
Log.d(TAG, "Going to start binding widgets soon.");
}
}
});
// Bind the widgets, one at a time.
// WARNING: this is calling into the workspace from the background thread,
// but since getCurrentScreen() just returns the int, we should be okay. This
// is just a hint for the order, and if it's wrong, we'll be okay.
// TODO: instead, we should have that push the current screen into here.
N = sAppWidgets.size();
// once for the current screen
for (int i=0; i<N; i++) {
final LauncherAppWidgetInfo widget = sAppWidgets.get(i);
if (widget.screen == currentScreen) {
mHandler.post(new Runnable() {
public void run() {
Callbacks callbacks = tryGetCallbacks(oldCallbacks);
if (callbacks != null) {
callbacks.bindAppWidget(widget);
}
}
});
}
}
// once for the other screens
for (int i=0; i<N; i++) {
final LauncherAppWidgetInfo widget = sAppWidgets.get(i);
if (widget.screen != currentScreen) {
mHandler.post(new Runnable() {
public void run() {
Callbacks callbacks = tryGetCallbacks(oldCallbacks);
if (callbacks != null) {
callbacks.bindAppWidget(widget);
}
}
});
}
}
// Tell the workspace that we're done.
mHandler.post(new Runnable() {
public void run() {
Callbacks callbacks = tryGetCallbacks(oldCallbacks);
if (callbacks != null) {
callbacks.finishBindingItems();
}
}
});
// Cleanup
mHandler.post(new Runnable() {
public void run() {
// If we're profiling, ensure this is the last thing in the queue.
if (DEBUG_LOADERS) {
Log.d(TAG, "bound workspace in "
+ (SystemClock.uptimeMillis()-t) + "ms");
}
mIsLoadingAndBindingWorkspace = false;
}
});
}
private void loadAndBindAllApps() {
if (DEBUG_LOADERS) {
Log.d(TAG, "loadAndBindAllApps mAllAppsLoaded=" + mAllAppsLoaded);
}
if (!mAllAppsLoaded) {
loadAllAppsByBatch();
synchronized (LoaderTask.this) {
if (mStopped) {
return;
}
mAllAppsLoaded = true;
}
} else {
onlyBindAllApps();
}
}
private void onlyBindAllApps() {
final Callbacks oldCallbacks = mCallbacks.get();
if (oldCallbacks == null) {
// This launcher has exited and nobody bothered to tell us. Just bail.
Log.w(TAG, "LoaderTask running with no launcher (onlyBindAllApps)");
return;
}
// shallow copy
@SuppressWarnings("unchecked")
final ArrayList<ApplicationInfo> list
= (ArrayList<ApplicationInfo>) mAllAppsList.data.clone();
mHandler.post(new Runnable() {
public void run() {
final long t = SystemClock.uptimeMillis();
final Callbacks callbacks = tryGetCallbacks(oldCallbacks);
if (callbacks != null) {
callbacks.bindAllApplications(list);
}
if (DEBUG_LOADERS) {
Log.d(TAG, "bound all " + list.size() + " apps from cache in "
+ (SystemClock.uptimeMillis()-t) + "ms");
}
}
});
}
private void loadAllAppsByBatch() {
final long t = DEBUG_LOADERS ? SystemClock.uptimeMillis() : 0;
// Don't use these two variables in any of the callback runnables.
// Otherwise we hold a reference to them.
final Callbacks oldCallbacks = mCallbacks.get();
if (oldCallbacks == null) {
// This launcher has exited and nobody bothered to tell us. Just bail.
Log.w(TAG, "LoaderTask running with no launcher (loadAllAppsByBatch)");
return;
}
final Intent mainIntent = new Intent(Intent.ACTION_MAIN, null);
mainIntent.addCategory(Intent.CATEGORY_LAUNCHER);
final PackageManager packageManager = mContext.getPackageManager();
List<ResolveInfo> apps = null;
int N = Integer.MAX_VALUE;
int startIndex;
int i=0;
int batchSize = -1;
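// N is unknown until the first pass queries the package manager;
// a configured mBatchSize of 0 means "load everything in one batch".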
while (i < N && !mStopped) {
if (i == 0) {
mAllAppsList.clear();
final long qiaTime = DEBUG_LOADERS ? SystemClock.uptimeMillis() : 0;
apps = packageManager.queryIntentActivities(mainIntent, 0);
if (DEBUG_LOADERS) {
Log.d(TAG, "queryIntentActivities took "
+ (SystemClock.uptimeMillis()-qiaTime) + "ms");
}
if (apps == null) {
return;
}
N = apps.size();
if (DEBUG_LOADERS) {
Log.d(TAG, "queryIntentActivities got " + N + " apps");
}
if (N == 0) {
// There are no apps?!?
return;
}
if (mBatchSize == 0) {
batchSize = N;
} else {
batchSize = mBatchSize;
}
final long sortTime = DEBUG_LOADERS ? SystemClock.uptimeMillis() : 0;
Collections.sort(apps,
new LauncherModel.ShortcutNameComparator(packageManager, mLabelCache));
if (DEBUG_LOADERS) {
Log.d(TAG, "sort took "
+ (SystemClock.uptimeMillis()-sortTime) + "ms");
}
}
final long t2 = DEBUG_LOADERS ? SystemClock.uptimeMillis() : 0;
startIndex = i;
for (int j=0; i<N && j<batchSize; j++) {
// This builds the icon bitmaps.
mAllAppsList.add(new ApplicationInfo(packageManager, apps.get(i),
mIconCache, mLabelCache));
i++;
}
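// The first batch replaces the whole list via bindAllApplications;
// subsequent batches are appended via bindAppsAdded.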
final boolean first = i <= batchSize;
final Callbacks callbacks = tryGetCallbacks(oldCallbacks);
final ArrayList<ApplicationInfo> added = mAllAppsList.added;
mAllAppsList.added = new ArrayList<ApplicationInfo>();
mHandler.post(new Runnable() {
public void run() {
final long t = SystemClock.uptimeMillis();
if (callbacks != null) {
if (first) {
callbacks.bindAllApplications(added);
} else {
callbacks.bindAppsAdded(added);
}
if (DEBUG_LOADERS) {
Log.d(TAG, "bound " + added.size() + " apps in "
+ (SystemClock.uptimeMillis() - t) + "ms");
}
} else {
Log.i(TAG, "not binding apps: no Launcher activity");
}
}
});
if (DEBUG_LOADERS) {
Log.d(TAG, "batch of " + (i-startIndex) + " icons processed in "
+ (SystemClock.uptimeMillis()-t2) + "ms");
}
if (mAllAppsLoadDelay > 0 && i < N) {
try {
if (DEBUG_LOADERS) {
Log.d(TAG, "sleeping for " + mAllAppsLoadDelay + "ms");
}
Thread.sleep(mAllAppsLoadDelay);
} catch (InterruptedException exc) { }
}
}
if (DEBUG_LOADERS) {
Log.d(TAG, "cached all " + N + " apps in "
+ (SystemClock.uptimeMillis()-t) + "ms"
+ (mAllAppsLoadDelay > 0 ? " (including delay)" : ""));
}
}
public void dumpState() {
Log.d(TAG, "mLoaderTask.mContext=" + mContext);
Log.d(TAG, "mLoaderTask.mWaitThread=" + mWaitThread);
Log.d(TAG, "mLoaderTask.mIsLaunching=" + mIsLaunching);
Log.d(TAG, "mLoaderTask.mStopped=" + mStopped);
Log.d(TAG, "mLoaderTask.mLoadAndBindStepFinished=" + mLoadAndBindStepFinished);
Log.d(TAG, "mItems size=" + sWorkspaceItems.size());
}
}
void enqueuePackageUpdated(PackageUpdatedTask task) {
sWorker.post(task);
}
private class PackageUpdatedTask implements Runnable {
int mOp;
String[] mPackages;
public static final int OP_NONE = 0;
public static final int OP_ADD = 1;
public static final int OP_UPDATE = 2;
public static final int OP_REMOVE = 3; // uninstalled
public static final int OP_UNAVAILABLE = 4; // external media unmounted
public PackageUpdatedTask(int op, String[] packages) {
mOp = op;
mPackages = packages;
}
public void run() {
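// Apply the package operation to mAllAppsList, then hand the resulting
// added/removed/modified deltas to the UI thread via mHandler.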
final Context context = mApp;
final String[] packages = mPackages;
final int N = packages.length;
switch (mOp) {
case OP_ADD:
for (int i=0; i<N; i++) {
if (DEBUG_LOADERS) Log.d(TAG, "mAllAppsList.addPackage " + packages[i]);
mAllAppsList.addPackage(context, packages[i]);
}
break;
case OP_UPDATE:
for (int i=0; i<N; i++) {
if (DEBUG_LOADERS) Log.d(TAG, "mAllAppsList.updatePackage " + packages[i]);
mAllAppsList.updatePackage(context, packages[i]);
}
break;
case OP_REMOVE:
case OP_UNAVAILABLE:
for (int i=0; i<N; i++) {
if (DEBUG_LOADERS) Log.d(TAG, "mAllAppsList.removePackage " + packages[i]);
mAllAppsList.removePackage(packages[i]);
}
break;
}
ArrayList<ApplicationInfo> added = null;
ArrayList<ApplicationInfo> removed = null;
ArrayList<ApplicationInfo> modified = null;
if (mAllAppsList.added.size() > 0) {
added = mAllAppsList.added;
mAllAppsList.added = new ArrayList<ApplicationInfo>();
}
if (mAllAppsList.removed.size() > 0) {
removed = mAllAppsList.removed;
mAllAppsList.removed = new ArrayList<ApplicationInfo>();
for (ApplicationInfo info: removed) {
mIconCache.remove(info.intent.getComponent());
}
}
if (mAllAppsList.modified.size() > 0) {
modified = mAllAppsList.modified;
mAllAppsList.modified = new ArrayList<ApplicationInfo>();
}
final Callbacks callbacks = mCallbacks != null ? mCallbacks.get() : null;
if (callbacks == null) {
Log.w(TAG, "Nobody to tell about the new app. Launcher is probably loading.");
return;
}
if (added != null) {
final ArrayList<ApplicationInfo> addedFinal = added;
mHandler.post(new Runnable() {
public void run() {
Callbacks cb = mCallbacks != null ? mCallbacks.get() : null;
if (callbacks == cb && cb != null) {
callbacks.bindAppsAdded(addedFinal);
}
}
});
}
if (modified != null) {
final ArrayList<ApplicationInfo> modifiedFinal = modified;
mHandler.post(new Runnable() {
public void run() {
Callbacks cb = mCallbacks != null ? mCallbacks.get() : null;
if (callbacks == cb && cb != null) {
callbacks.bindAppsUpdated(modifiedFinal);
}
}
});
}
if (removed != null) {
final boolean permanent = mOp != OP_UNAVAILABLE;
final ArrayList<ApplicationInfo> removedFinal = removed;
mHandler.post(new Runnable() {
public void run() {
Callbacks cb = mCallbacks != null ? mCallbacks.get() : null;
if (callbacks == cb && cb != null) {
callbacks.bindAppsRemoved(removedFinal, permanent);
}
}
});
}
mHandler.post(new Runnable() {
@Override
public void run() {
Callbacks cb = mCallbacks != null ? mCallbacks.get() : null;
if (callbacks == cb && cb != null) {
callbacks.bindPackagesUpdated();
}
}
});
}
}
/**
* Returns all the workspace ShortcutInfos associated with a particular package.
* @param packageName the package name to match against each shortcut
* @return the matching shortcuts, empty if there are none
*/
ArrayList<ShortcutInfo> getShortcutInfosForPackage(String packageName) {
ArrayList<ShortcutInfo> infos = new ArrayList<ShortcutInfo>();
for (ItemInfo i : sWorkspaceItems) {
if (i instanceof ShortcutInfo) {
ShortcutInfo info = (ShortcutInfo) i;
if (packageName.equals(info.getPackageName())) {
infos.add(info);
}
}
}
return infos;
}
/**
* This is called from the code that adds shortcuts from the intent receiver. It
* doesn't have a Cursor, so it delegates to the full version with no
* cursor-backed fallback data.
*/
public ShortcutInfo getShortcutInfo(PackageManager manager, Intent intent, Context context) {
return getShortcutInfo(manager, intent, context, null, -1, -1, null);
}
/**
* Make a ShortcutInfo object for a shortcut that is an application.
*
* If c is not null, then it will be used to fill in missing data like the title and icon.
*/
public ShortcutInfo getShortcutInfo(PackageManager manager, Intent intent, Context context,
Cursor c, int iconIndex, int titleIndex, HashMap<Object, CharSequence> labelCache) {
Bitmap icon = null;
final ShortcutInfo info = new ShortcutInfo();
ComponentName componentName = intent.getComponent();
if (componentName == null) {
return null;
}
try {
PackageInfo pi = manager.getPackageInfo(componentName.getPackageName(), 0);
if (!pi.applicationInfo.enabled) {
// If we return null here, the corresponding item will be removed from the launcher
// db and will not appear in the workspace.
return null;
}
} catch (NameNotFoundException e) {
Log.d(TAG, "getPackInfo failed for package " + componentName.getPackageName());
}
// TODO: See if the PackageManager knows about this case. If it doesn't
// then return null & delete this.
// the resource -- This may implicitly give us back the fallback icon,
// but don't worry about that. All we're doing with usingFallbackIcon is
// to avoid saving lots of copies of that in the database, and most apps
// have icons anyway.
// Attempt to use queryIntentActivities to get the ResolveInfo (with IntentFilter info) and
// if that fails, or is ambiguous, fall back to the standard way of getting the resolve info
// via resolveActivity().
ResolveInfo resolveInfo = null;
ComponentName oldComponent = intent.getComponent();
Intent newIntent = new Intent(intent.getAction(), null);
newIntent.addCategory(Intent.CATEGORY_LAUNCHER);
newIntent.setPackage(oldComponent.getPackageName());
List<ResolveInfo> infos = manager.queryIntentActivities(newIntent, 0);
for (ResolveInfo i : infos) {
ComponentName cn = new ComponentName(i.activityInfo.packageName,
i.activityInfo.name);
if (cn.equals(oldComponent)) {
resolveInfo = i;
}
}
if (resolveInfo == null) {
resolveInfo = manager.resolveActivity(intent, 0);
}
if (resolveInfo != null) {
icon = mIconCache.getIcon(componentName, resolveInfo, labelCache);
}
// the db
if (icon == null) {
if (c != null) {
icon = getIconFromCursor(c, iconIndex, context);
}
}
// the fallback icon
if (icon == null) {
icon = getFallbackIcon();
info.usingFallbackIcon = true;
}
info.setIcon(icon);
// from the resource
if (resolveInfo != null) {
ComponentName key = LauncherModel.getComponentNameFromResolveInfo(resolveInfo);
if (labelCache != null && labelCache.containsKey(key)) {
info.title = labelCache.get(key);
} else {
info.title = resolveInfo.activityInfo.loadLabel(manager);
if (labelCache != null) {
labelCache.put(key, info.title);
}
}
}
// from the db
if (info.title == null) {
if (c != null) {
info.title = c.getString(titleIndex);
}
}
// fall back to the class name of the activity
if (info.title == null) {
info.title = componentName.getClassName();
}
info.itemType = LauncherSettings.Favorites.ITEM_TYPE_APPLICATION;
return info;
}
/**
* Make a ShortcutInfo object for a shortcut that isn't an application.
*/
private ShortcutInfo getShortcutInfo(Cursor c, Context context,
int iconTypeIndex, int iconPackageIndex, int iconResourceIndex, int iconIndex,
int titleIndex) {
Bitmap icon = null;
final ShortcutInfo info = new ShortcutInfo();
info.itemType = LauncherSettings.Favorites.ITEM_TYPE_SHORTCUT;
// TODO: If there's an explicit component and we can't install that, delete it.
info.title = c.getString(titleIndex);
int iconType = c.getInt(iconTypeIndex);
switch (iconType) {
case LauncherSettings.Favorites.ICON_TYPE_RESOURCE:
String packageName = c.getString(iconPackageIndex);
String resourceName = c.getString(iconResourceIndex);
PackageManager packageManager = context.getPackageManager();
info.customIcon = false;
// the resource
try {
Resources resources = packageManager.getResourcesForApplication(packageName);
if (resources != null) {
final int id = resources.getIdentifier(resourceName, null, null);
icon = Utilities.createIconBitmap(
mIconCache.getFullResIcon(resources, id), context);
}
} catch (Exception e) {
// drop this. we have other places to look for icons
}
// the db
if (icon == null) {
icon = getIconFromCursor(c, iconIndex, context);
}
// the fallback icon
if (icon == null) {
icon = getFallbackIcon();
info.usingFallbackIcon = true;
}
break;
case LauncherSettings.Favorites.ICON_TYPE_BITMAP:
icon = getIconFromCursor(c, iconIndex, context);
if (icon == null) {
icon = getFallbackIcon();
info.customIcon = false;
info.usingFallbackIcon = true;
} else {
info.customIcon = true;
}
break;
default:
icon = getFallbackIcon();
info.usingFallbackIcon = true;
info.customIcon = false;
break;
}
info.setIcon(icon);
return info;
}
Bitmap getIconFromCursor(Cursor c, int iconIndex, Context context) {
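// Decode the icon blob stored in the favorites table; returns null if the
// blob is missing or can't be decoded.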
@SuppressWarnings("all") // suppress dead code warning
final boolean debug = false;
if (debug) {
Log.d(TAG, "getIconFromCursor app="
+ c.getString(c.getColumnIndexOrThrow(LauncherSettings.Favorites.TITLE)));
}
byte[] data = c.getBlob(iconIndex);
try {
return Utilities.createIconBitmap(
BitmapFactory.decodeByteArray(data, 0, data.length), context);
} catch (Exception e) {
return null;
}
}
ShortcutInfo addShortcut(Context context, Intent data, long container, int screen,
int cellX, int cellY, boolean notify) {
final ShortcutInfo info = infoFromShortcutIntent(context, data, null);
if (info == null) {
return null;
}
addItemToDatabase(context, info, container, screen, cellX, cellY, notify);
return info;
}
/**
* Attempts to find an AppWidgetProviderInfo that matches the given component.
*/
AppWidgetProviderInfo findAppWidgetProviderInfoWithComponent(Context context,
ComponentName component) {
List<AppWidgetProviderInfo> widgets =
AppWidgetManager.getInstance(context).getInstalledProviders();
for (AppWidgetProviderInfo info : widgets) {
if (info.provider.equals(component)) {
return info;
}
}
return null;
}
/**
* Returns a list of all the widgets that can handle configuration with a particular mimeType.
*/
List<WidgetMimeTypeHandlerData> resolveWidgetsForMimeType(Context context, String mimeType) {
final PackageManager packageManager = context.getPackageManager();
final List<WidgetMimeTypeHandlerData> supportedConfigurationActivities =
new ArrayList<WidgetMimeTypeHandlerData>();
final Intent supportsIntent =
new Intent(InstallWidgetReceiver.ACTION_SUPPORTS_CLIPDATA_MIMETYPE);
supportsIntent.setType(mimeType);
// Create a set of widget configuration components that we can test against
final List<AppWidgetProviderInfo> widgets =
AppWidgetManager.getInstance(context).getInstalledProviders();
final HashMap<ComponentName, AppWidgetProviderInfo> configurationComponentToWidget =
new HashMap<ComponentName, AppWidgetProviderInfo>();
for (AppWidgetProviderInfo info : widgets) {
configurationComponentToWidget.put(info.configure, info);
}
// Run through each of the intents that can handle this type of clip data, and cross
// reference them with the components that are actual configuration components
final List<ResolveInfo> activities = packageManager.queryIntentActivities(supportsIntent,
PackageManager.MATCH_DEFAULT_ONLY);
for (ResolveInfo info : activities) {
final ActivityInfo activityInfo = info.activityInfo;
final ComponentName infoComponent = new ComponentName(activityInfo.packageName,
activityInfo.name);
if (configurationComponentToWidget.containsKey(infoComponent)) {
supportedConfigurationActivities.add(
new InstallWidgetReceiver.WidgetMimeTypeHandlerData(info,
configurationComponentToWidget.get(infoComponent)));
}
}
return supportedConfigurationActivities;
}
ShortcutInfo infoFromShortcutIntent(Context context, Intent data, Bitmap fallbackIcon) {
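// Unpack the ACTION_CREATE_SHORTCUT result: target intent, display name,
// and either a bitmap icon or a ShortcutIconResource.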
Intent intent = data.getParcelableExtra(Intent.EXTRA_SHORTCUT_INTENT);
String name = data.getStringExtra(Intent.EXTRA_SHORTCUT_NAME);
Parcelable bitmap = data.getParcelableExtra(Intent.EXTRA_SHORTCUT_ICON);
if (intent == null) {
// If the intent is null, we can't construct a valid ShortcutInfo, so we return null
Log.e(TAG, "Can't construct ShorcutInfo with null intent");
return null;
}
Bitmap icon = null;
boolean customIcon = false;
ShortcutIconResource iconResource = null;
if (bitmap != null && bitmap instanceof Bitmap) {
icon = Utilities.createIconBitmap(new FastBitmapDrawable((Bitmap)bitmap), context);
customIcon = true;
} else {
Parcelable extra = data.getParcelableExtra(Intent.EXTRA_SHORTCUT_ICON_RESOURCE);
if (extra != null && extra instanceof ShortcutIconResource) {
try {
iconResource = (ShortcutIconResource) extra;
final PackageManager packageManager = context.getPackageManager();
Resources resources = packageManager.getResourcesForApplication(
iconResource.packageName);
final int id = resources.getIdentifier(iconResource.resourceName, null, null);
icon = Utilities.createIconBitmap(
mIconCache.getFullResIcon(resources, id), context);
} catch (Exception e) {
Log.w(TAG, "Could not load shortcut icon: " + extra);
}
}
}
final ShortcutInfo info = new ShortcutInfo();
if (icon == null) {
if (fallbackIcon != null) {
icon = fallbackIcon;
} else {
icon = getFallbackIcon();
info.usingFallbackIcon = true;
}
}
info.setIcon(icon);
info.title = name;
info.intent = intent;
info.customIcon = customIcon;
info.iconResource = iconResource;
return info;
}
boolean queueIconToBeChecked(HashMap<Object, byte[]> cache, ShortcutInfo info, Cursor c,
int iconIndex) {
// If apps can't be on SD, don't even bother.
if (!mAppsCanBeOnExternalStorage) {
return false;
}
// If this icon doesn't have a custom icon, check to see
// what's stored in the DB, and if it doesn't match what
// we're going to show, store what we are going to show back
// into the DB. We do this so when we're loading, if the
// package manager can't find an icon (for example because
// the app is on SD) then we can use that instead.
if (!info.customIcon && !info.usingFallbackIcon) {
cache.put(info, c.getBlob(iconIndex));
return true;
}
return false;
}
void updateSavedIcon(Context context, ShortcutInfo info, byte[] data) {
boolean needSave = false;
try {
if (data != null) {
Bitmap saved = BitmapFactory.decodeByteArray(data, 0, data.length);
Bitmap loaded = info.getIcon(mIconCache);
needSave = !saved.sameAs(loaded);
} else {
needSave = true;
}
} catch (Exception e) {
needSave = true;
}
if (needSave) {
Log.d(TAG, "going to save icon bitmap for info=" + info);
// This is slower than is ideal, but this only happens once
// or when the app is updated with a new icon.
updateItemInDatabase(context, info);
}
}
/**
* Return an existing FolderInfo object if we have encountered this ID previously,
* or make a new one.
*/
private static FolderInfo findOrMakeFolder(HashMap<Long, FolderInfo> folders, long id) {
// See if a placeholder was created for us already
FolderInfo folderInfo = folders.get(id);
if (folderInfo == null) {
// No placeholder -- create a new instance
folderInfo = new FolderInfo();
folders.put(id, folderInfo);
}
return folderInfo;
}
private static final Collator sCollator = Collator.getInstance();
public static final Comparator<ApplicationInfo> APP_NAME_COMPARATOR
= new Comparator<ApplicationInfo>() {
public final int compare(ApplicationInfo a, ApplicationInfo b) {
int result = sCollator.compare(a.title.toString(), b.title.toString());
if (result == 0) {
result = a.componentName.compareTo(b.componentName);
}
return result;
}
};
public static final Comparator<ApplicationInfo> APP_INSTALL_TIME_COMPARATOR
= new Comparator<ApplicationInfo>() {
public final int compare(ApplicationInfo a, ApplicationInfo b) {
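// Newest installs sort first (descending by firstInstallTime).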
if (a.firstInstallTime < b.firstInstallTime) return 1;
if (a.firstInstallTime > b.firstInstallTime) return -1;
return 0;
}
};
public static final Comparator<AppWidgetProviderInfo> WIDGET_NAME_COMPARATOR
= new Comparator<AppWidgetProviderInfo>() {
public final int compare(AppWidgetProviderInfo a, AppWidgetProviderInfo b) {
return sCollator.compare(a.label.toString(), b.label.toString());
}
};
static ComponentName getComponentNameFromResolveInfo(ResolveInfo info) {
if (info.activityInfo != null) {
return new ComponentName(info.activityInfo.packageName, info.activityInfo.name);
} else {
return new ComponentName(info.serviceInfo.packageName, info.serviceInfo.name);
}
}
public static class ShortcutNameComparator implements Comparator<ResolveInfo> {
private PackageManager mPackageManager;
private HashMap<Object, CharSequence> mLabelCache;
ShortcutNameComparator(PackageManager pm) {
mPackageManager = pm;
mLabelCache = new HashMap<Object, CharSequence>();
}
ShortcutNameComparator(PackageManager pm, HashMap<Object, CharSequence> labelCache) {
mPackageManager = pm;
mLabelCache = labelCache;
}
public final int compare(ResolveInfo a, ResolveInfo b) {
CharSequence labelA, labelB;
ComponentName keyA = LauncherModel.getComponentNameFromResolveInfo(a);
ComponentName keyB = LauncherModel.getComponentNameFromResolveInfo(b);
if (mLabelCache.containsKey(keyA)) {
labelA = mLabelCache.get(keyA);
} else {
labelA = a.loadLabel(mPackageManager).toString();
mLabelCache.put(keyA, labelA);
}
if (mLabelCache.containsKey(keyB)) {
labelB = mLabelCache.get(keyB);
} else {
labelB = b.loadLabel(mPackageManager).toString();
mLabelCache.put(keyB, labelB);
}
return sCollator.compare(labelA, labelB);
}
};
public static class WidgetAndShortcutNameComparator implements Comparator<Object> {
private PackageManager mPackageManager;
private HashMap<Object, String> mLabelCache;
WidgetAndShortcutNameComparator(PackageManager pm) {
mPackageManager = pm;
mLabelCache = new HashMap<Object, String>();
}
public final int compare(Object a, Object b) {
String labelA, labelB;
if (mLabelCache.containsKey(a)) {
labelA = mLabelCache.get(a);
} else {
labelA = (a instanceof AppWidgetProviderInfo) ?
((AppWidgetProviderInfo) a).label :
((ResolveInfo) a).loadLabel(mPackageManager).toString();
mLabelCache.put(a, labelA);
}
if (mLabelCache.containsKey(b)) {
labelB = mLabelCache.get(b);
} else {
labelB = (b instanceof AppWidgetProviderInfo) ?
((AppWidgetProviderInfo) b).label :
((ResolveInfo) b).loadLabel(mPackageManager).toString();
mLabelCache.put(b, labelB);
}
return sCollator.compare(labelA, labelB);
}
};
public void dumpState() {
Log.d(TAG, "mCallbacks=" + mCallbacks);
ApplicationInfo.dumpApplicationInfoList(TAG, "mAllAppsList.data", mAllAppsList.data);
ApplicationInfo.dumpApplicationInfoList(TAG, "mAllAppsList.added", mAllAppsList.added);
ApplicationInfo.dumpApplicationInfoList(TAG, "mAllAppsList.removed", mAllAppsList.removed);
ApplicationInfo.dumpApplicationInfoList(TAG, "mAllAppsList.modified", mAllAppsList.modified);
if (mLoaderTask != null) {
mLoaderTask.dumpState();
} else {
Log.d(TAG, "mLoaderTask=null");
}
}
}
| true | true | static boolean shortcutExists(Context context, String title, Intent intent) {
final ContentResolver cr = context.getContentResolver();
Cursor c = cr.query(LauncherSettings.Favorites.CONTENT_URI,
new String[] { "title", "intent" }, "title=? and intent=?",
new String[] { title, intent.toUri(0) }, null);
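// A match means a favorite with this exact title and serialized intent URI already exists.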
boolean result = false;
try {
result = c.moveToFirst();
} finally {
c.close();
}
return result;
}
/**
* Returns a list of ItemInfo objects for all the items in the launcher database.
* The ItemInfo.id is not set through this function.
*/
static ArrayList<ItemInfo> getItemsInLocalCoordinates(Context context) {
ArrayList<ItemInfo> items = new ArrayList<ItemInfo>();
final ContentResolver cr = context.getContentResolver();
Cursor c = cr.query(LauncherSettings.Favorites.CONTENT_URI, new String[] {
LauncherSettings.Favorites.ITEM_TYPE, LauncherSettings.Favorites.CONTAINER,
LauncherSettings.Favorites.SCREEN, LauncherSettings.Favorites.CELLX, LauncherSettings.Favorites.CELLY,
LauncherSettings.Favorites.SPANX, LauncherSettings.Favorites.SPANY }, null, null, null);
final int itemTypeIndex = c.getColumnIndexOrThrow(LauncherSettings.Favorites.ITEM_TYPE);
final int containerIndex = c.getColumnIndexOrThrow(LauncherSettings.Favorites.CONTAINER);
final int screenIndex = c.getColumnIndexOrThrow(LauncherSettings.Favorites.SCREEN);
final int cellXIndex = c.getColumnIndexOrThrow(LauncherSettings.Favorites.CELLX);
final int cellYIndex = c.getColumnIndexOrThrow(LauncherSettings.Favorites.CELLY);
final int spanXIndex = c.getColumnIndexOrThrow(LauncherSettings.Favorites.SPANX);
final int spanYIndex = c.getColumnIndexOrThrow(LauncherSettings.Favorites.SPANY);
try {
while (c.moveToNext()) {
ItemInfo item = new ItemInfo();
item.cellX = c.getInt(cellXIndex);
item.cellY = c.getInt(cellYIndex);
item.spanX = c.getInt(spanXIndex);
item.spanY = c.getInt(spanYIndex);
item.container = c.getInt(containerIndex);
item.itemType = c.getInt(itemTypeIndex);
item.screen = c.getInt(screenIndex);
items.add(item);
}
} catch (Exception e) {
items.clear();
} finally {
c.close();
}
return items;
}
/**
* Find a folder in the db, creating the FolderInfo if necessary, and adding it to folderList.
*/
FolderInfo getFolderById(Context context, HashMap<Long,FolderInfo> folderList, long id) {
final ContentResolver cr = context.getContentResolver();
Cursor c = cr.query(LauncherSettings.Favorites.CONTENT_URI, null,
"_id=? and (itemType=? or itemType=?)",
new String[] { String.valueOf(id),
String.valueOf(LauncherSettings.Favorites.ITEM_TYPE_FOLDER)}, null);
try {
if (c.moveToFirst()) {
final int itemTypeIndex = c.getColumnIndexOrThrow(LauncherSettings.Favorites.ITEM_TYPE);
final int titleIndex = c.getColumnIndexOrThrow(LauncherSettings.Favorites.TITLE);
final int containerIndex = c.getColumnIndexOrThrow(LauncherSettings.Favorites.CONTAINER);
final int screenIndex = c.getColumnIndexOrThrow(LauncherSettings.Favorites.SCREEN);
final int cellXIndex = c.getColumnIndexOrThrow(LauncherSettings.Favorites.CELLX);
final int cellYIndex = c.getColumnIndexOrThrow(LauncherSettings.Favorites.CELLY);
FolderInfo folderInfo = null;
switch (c.getInt(itemTypeIndex)) {
case LauncherSettings.Favorites.ITEM_TYPE_FOLDER:
folderInfo = findOrMakeFolder(folderList, id);
break;
}
folderInfo.title = c.getString(titleIndex);
folderInfo.id = id;
folderInfo.container = c.getInt(containerIndex);
folderInfo.screen = c.getInt(screenIndex);
folderInfo.cellX = c.getInt(cellXIndex);
folderInfo.cellY = c.getInt(cellYIndex);
return folderInfo;
}
} finally {
c.close();
}
return null;
}
/**
* Add an item to the database in a specified container. Sets the container, screen, cellX and
* cellY fields of the item. Also assigns an ID to the item.
*/
static void addItemToDatabase(Context context, final ItemInfo item, final long container,
final int screen, final int cellX, final int cellY, final boolean notify) {
item.container = container;
item.cellX = cellX;
item.cellY = cellY;
// We store hotseat items in canonical form which is this orientation invariant position
// in the hotseat
if (context instanceof Launcher && screen < 0 &&
container == LauncherSettings.Favorites.CONTAINER_HOTSEAT) {
item.screen = ((Launcher) context).getHotseat().getOrderInHotseat(cellX, cellY);
} else {
item.screen = screen;
}
final ContentValues values = new ContentValues();
final ContentResolver cr = context.getContentResolver();
item.onAddToDatabase(values);
LauncherApplication app = (LauncherApplication) context.getApplicationContext();
item.id = app.getLauncherProvider().generateNewId();
values.put(LauncherSettings.Favorites._ID, item.id);
item.updateValuesWithCoordinates(values, item.cellX, item.cellY);
Runnable r = new Runnable() {
public void run() {
cr.insert(notify ? LauncherSettings.Favorites.CONTENT_URI :
LauncherSettings.Favorites.CONTENT_URI_NO_NOTIFICATION, values);
if (sItemsIdMap.containsKey(item.id)) {
// we should not be adding new items in the db with the same id
throw new RuntimeException("Error: ItemInfo id (" + item.id + ") passed to " +
"addItemToDatabase already exists." + item.toString());
}
sItemsIdMap.put(item.id, item);
switch (item.itemType) {
case LauncherSettings.Favorites.ITEM_TYPE_FOLDER:
sFolders.put(item.id, (FolderInfo) item);
// Fall through
case LauncherSettings.Favorites.ITEM_TYPE_APPLICATION:
case LauncherSettings.Favorites.ITEM_TYPE_SHORTCUT:
if (item.container == LauncherSettings.Favorites.CONTAINER_DESKTOP ||
item.container == LauncherSettings.Favorites.CONTAINER_HOTSEAT) {
sWorkspaceItems.add(item);
}
break;
case LauncherSettings.Favorites.ITEM_TYPE_APPWIDGET:
sAppWidgets.add((LauncherAppWidgetInfo) item);
break;
}
}
};
if (sWorkerThread.getThreadId() == Process.myTid()) {
r.run();
} else {
sWorker.post(r);
}
}
/**
* Creates a new unique child id for a given cell span across all layouts.
*/
static int getCellLayoutChildId(
long container, int screen, int localCellX, int localCellY, int spanX, int spanY) {
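// Pack container, screen, cellX and cellY into one int (8 bits each);
// spanX and spanY are accepted but not used in the id.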
return (((int) container & 0xFF) << 24)
| (screen & 0xFF) << 16 | (localCellX & 0xFF) << 8 | (localCellY & 0xFF);
}
static int getCellCountX() {
return mCellCountX;
}
static int getCellCountY() {
return mCellCountY;
}
/**
* Updates the model orientation helper to take into account the current layout dimensions
* when performing local/canonical coordinate transformations.
*/
static void updateWorkspaceLayoutCells(int shortAxisCellCount, int longAxisCellCount) {
mCellCountX = shortAxisCellCount;
mCellCountY = longAxisCellCount;
}
/**
* Removes the specified item from the database
* @param context
* @param item
*/
static void deleteItemFromDatabase(Context context, final ItemInfo item) {
final ContentResolver cr = context.getContentResolver();
final Uri uriToDelete = LauncherSettings.Favorites.getContentUri(item.id, false);
Runnable r = new Runnable() {
public void run() {
cr.delete(uriToDelete, null, null);
switch (item.itemType) {
case LauncherSettings.Favorites.ITEM_TYPE_FOLDER:
sFolders.remove(item.id);
sWorkspaceItems.remove(item);
break;
case LauncherSettings.Favorites.ITEM_TYPE_APPLICATION:
case LauncherSettings.Favorites.ITEM_TYPE_SHORTCUT:
sWorkspaceItems.remove(item);
break;
case LauncherSettings.Favorites.ITEM_TYPE_APPWIDGET:
sAppWidgets.remove((LauncherAppWidgetInfo) item);
break;
}
sItemsIdMap.remove(item.id);
sDbIconCache.remove(item);
}
};
if (sWorkerThread.getThreadId() == Process.myTid()) {
r.run();
} else {
sWorker.post(r);
}
}
/**
* Remove the contents of the specified folder from the database
*/
static void deleteFolderContentsFromDatabase(Context context, final FolderInfo info) {
final ContentResolver cr = context.getContentResolver();
Runnable r = new Runnable() {
public void run() {
cr.delete(LauncherSettings.Favorites.getContentUri(info.id, false), null, null);
sItemsIdMap.remove(info.id);
sFolders.remove(info.id);
sDbIconCache.remove(info);
sWorkspaceItems.remove(info);
cr.delete(LauncherSettings.Favorites.CONTENT_URI_NO_NOTIFICATION,
LauncherSettings.Favorites.CONTAINER + "=" + info.id, null);
for (ItemInfo childInfo : info.contents) {
sItemsIdMap.remove(childInfo.id);
sDbIconCache.remove(childInfo);
}
}
};
if (sWorkerThread.getThreadId() == Process.myTid()) {
r.run();
} else {
sWorker.post(r);
}
}
/**
* Set this as the current Launcher activity object for the loader.
*/
public void initialize(Callbacks callbacks) {
synchronized (mLock) {
mCallbacks = new WeakReference<Callbacks>(callbacks);
}
}
/**
* Call from the handler for ACTION_PACKAGE_ADDED, ACTION_PACKAGE_REMOVED and
* ACTION_PACKAGE_CHANGED.
*/
@Override
public void onReceive(Context context, Intent intent) {
if (DEBUG_LOADERS) Log.d(TAG, "onReceive intent=" + intent);
final String action = intent.getAction();
if (Intent.ACTION_PACKAGE_CHANGED.equals(action)
|| Intent.ACTION_PACKAGE_REMOVED.equals(action)
|| Intent.ACTION_PACKAGE_ADDED.equals(action)) {
final String packageName = intent.getData().getSchemeSpecificPart();
final boolean replacing = intent.getBooleanExtra(Intent.EXTRA_REPLACING, false);
int op = PackageUpdatedTask.OP_NONE;
if (packageName == null || packageName.length() == 0) {
// they sent us a bad intent
return;
}
if (Intent.ACTION_PACKAGE_CHANGED.equals(action)) {
op = PackageUpdatedTask.OP_UPDATE;
} else if (Intent.ACTION_PACKAGE_REMOVED.equals(action)) {
if (!replacing) {
op = PackageUpdatedTask.OP_REMOVE;
}
// else, we are replacing the package, so a PACKAGE_ADDED will be sent
// later; we will update the package at that time
} else if (Intent.ACTION_PACKAGE_ADDED.equals(action)) {
if (!replacing) {
op = PackageUpdatedTask.OP_ADD;
} else {
op = PackageUpdatedTask.OP_UPDATE;
}
}
if (op != PackageUpdatedTask.OP_NONE) {
enqueuePackageUpdated(new PackageUpdatedTask(op, new String[] { packageName }));
}
} else if (Intent.ACTION_EXTERNAL_APPLICATIONS_AVAILABLE.equals(action)) {
// First, schedule to add these apps back in.
String[] packages = intent.getStringArrayExtra(Intent.EXTRA_CHANGED_PACKAGE_LIST);
enqueuePackageUpdated(new PackageUpdatedTask(PackageUpdatedTask.OP_ADD, packages));
// Then, rebind everything.
startLoaderFromBackground();
} else if (Intent.ACTION_EXTERNAL_APPLICATIONS_UNAVAILABLE.equals(action)) {
String[] packages = intent.getStringArrayExtra(Intent.EXTRA_CHANGED_PACKAGE_LIST);
enqueuePackageUpdated(new PackageUpdatedTask(
PackageUpdatedTask.OP_UNAVAILABLE, packages));
} else if (Intent.ACTION_LOCALE_CHANGED.equals(action)) {
// If we have changed locale we need to clear out the labels in all apps/workspace.
forceReload();
} else if (Intent.ACTION_CONFIGURATION_CHANGED.equals(action)) {
// Check if configuration change was an mcc/mnc change which would affect app resources
// and we would need to clear out the labels in all apps/workspace. Same handling as
// above for ACTION_LOCALE_CHANGED
Configuration currentConfig = context.getResources().getConfiguration();
if (mPreviousConfigMcc != currentConfig.mcc) {
Log.d(TAG, "Reload apps on config change. curr_mcc:"
+ currentConfig.mcc + " prevmcc:" + mPreviousConfigMcc);
forceReload();
}
// Update previousConfig
mPreviousConfigMcc = currentConfig.mcc;
} else if (SearchManager.INTENT_GLOBAL_SEARCH_ACTIVITY_CHANGED.equals(action) ||
SearchManager.INTENT_ACTION_SEARCHABLES_CHANGED.equals(action)) {
if (mCallbacks != null) {
Callbacks callbacks = mCallbacks.get();
if (callbacks != null) {
callbacks.bindSearchablesChanged();
}
}
}
}
private void forceReload() {
resetLoadedState(true, true);
// Do this here because if the launcher activity is running it will be restarted.
// If it's not running startLoaderFromBackground will merely tell it that it needs
// to reload.
startLoaderFromBackground();
}
public void resetLoadedState(boolean resetAllAppsLoaded, boolean resetWorkspaceLoaded) {
synchronized (mLock) {
// Stop any existing loaders first, so they don't set mAllAppsLoaded or
// mWorkspaceLoaded to true later
stopLoaderLocked();
if (resetAllAppsLoaded) mAllAppsLoaded = false;
if (resetWorkspaceLoaded) mWorkspaceLoaded = false;
}
}
/**
* When the launcher is in the background, it's possible for it to miss paired
* configuration changes. So whenever we trigger the loader from the background
* tell the launcher that it needs to re-run the loader when it comes back instead
* of doing it now.
*/
public void startLoaderFromBackground() {
boolean runLoader = false;
if (mCallbacks != null) {
Callbacks callbacks = mCallbacks.get();
if (callbacks != null) {
// Only actually run the loader if they're not paused.
if (!callbacks.setLoadOnResume()) {
runLoader = true;
}
}
}
if (runLoader) {
startLoader(false);
}
}
// If there is already a loader task running, tell it to stop.
// returns true if isLaunching() was true on the old task
private boolean stopLoaderLocked() {
boolean isLaunching = false;
LoaderTask oldTask = mLoaderTask;
if (oldTask != null) {
if (oldTask.isLaunching()) {
isLaunching = true;
}
oldTask.stopLocked();
}
return isLaunching;
}
public void startLoader(boolean isLaunching) {
synchronized (mLock) {
if (DEBUG_LOADERS) {
Log.d(TAG, "startLoader isLaunching=" + isLaunching);
}
// Don't bother to start the thread if we know it's not going to do anything
if (mCallbacks != null && mCallbacks.get() != null) {
// If there is already one running, tell it to stop.
// also, don't downgrade isLaunching if we're already running
isLaunching = isLaunching || stopLoaderLocked();
mLoaderTask = new LoaderTask(mApp, isLaunching);
sWorkerThread.setPriority(Thread.NORM_PRIORITY);
sWorker.post(mLoaderTask);
}
}
}
public void stopLoader() {
synchronized (mLock) {
if (mLoaderTask != null) {
mLoaderTask.stopLocked();
}
}
}
public boolean isAllAppsLoaded() {
return mAllAppsLoaded;
}
boolean isLoadingWorkspace() {
synchronized (mLock) {
if (mLoaderTask != null) {
return mLoaderTask.isLoadingWorkspace();
}
}
return false;
}
/**
* Runnable for the thread that loads the contents of the launcher:
* - workspace icons
* - widgets
* - all apps icons
*/
private class LoaderTask implements Runnable {
private Context mContext;
private Thread mWaitThread;
private boolean mIsLaunching;
private boolean mIsLoadingAndBindingWorkspace;
private boolean mStopped;
private boolean mLoadAndBindStepFinished;
private HashMap<Object, CharSequence> mLabelCache;
LoaderTask(Context context, boolean isLaunching) {
mContext = context;
mIsLaunching = isLaunching;
mLabelCache = new HashMap<Object, CharSequence>();
}
boolean isLaunching() {
return mIsLaunching;
}
boolean isLoadingWorkspace() {
return mIsLoadingAndBindingWorkspace;
}
private void loadAndBindWorkspace() {
mIsLoadingAndBindingWorkspace = true;
// Load the workspace
if (DEBUG_LOADERS) {
Log.d(TAG, "loadAndBindWorkspace mWorkspaceLoaded=" + mWorkspaceLoaded);
}
if (!mWorkspaceLoaded) {
loadWorkspace();
synchronized (LoaderTask.this) {
if (mStopped) {
return;
}
mWorkspaceLoaded = true;
}
}
// Bind the workspace
bindWorkspace();
}
private void waitForIdle() {
// Wait until either we're stopped or the other threads are done.
// This way we don't start loading all apps until the workspace has settled
// down.
synchronized (LoaderTask.this) {
final long workspaceWaitTime = DEBUG_LOADERS ? SystemClock.uptimeMillis() : 0;
mHandler.postIdle(new Runnable() {
public void run() {
synchronized (LoaderTask.this) {
mLoadAndBindStepFinished = true;
if (DEBUG_LOADERS) {
Log.d(TAG, "done with previous binding step");
}
LoaderTask.this.notify();
}
}
});
while (!mStopped && !mLoadAndBindStepFinished) {
try {
this.wait();
} catch (InterruptedException ex) {
// Ignore
}
}
if (DEBUG_LOADERS) {
Log.d(TAG, "waited "
+ (SystemClock.uptimeMillis()-workspaceWaitTime)
+ "ms for previous step to finish binding");
}
}
}
public void run() {
// Optimize for end-user experience: if the Launcher is up and running with the
// All Apps interface in the foreground, load All Apps first. Otherwise, load the
// workspace first (default).
final Callbacks cbk = mCallbacks.get();
final boolean loadWorkspaceFirst = cbk != null ? (!cbk.isAllAppsVisible()) : true;
keep_running: {
// Elevate priority when Home launches for the first time to avoid
// starving at boot time. Staring at a blank home is not cool.
synchronized (mLock) {
if (DEBUG_LOADERS) Log.d(TAG, "Setting thread priority to " +
(mIsLaunching ? "DEFAULT" : "BACKGROUND"));
android.os.Process.setThreadPriority(mIsLaunching
? Process.THREAD_PRIORITY_DEFAULT : Process.THREAD_PRIORITY_BACKGROUND);
}
if (loadWorkspaceFirst) {
if (DEBUG_LOADERS) Log.d(TAG, "step 1: loading workspace");
loadAndBindWorkspace();
} else {
if (DEBUG_LOADERS) Log.d(TAG, "step 1: special: loading all apps");
loadAndBindAllApps();
}
if (mStopped) {
break keep_running;
}
// Whew! Hard work done. Slow us down, and wait until the UI thread has
// settled down.
synchronized (mLock) {
if (mIsLaunching) {
if (DEBUG_LOADERS) Log.d(TAG, "Setting thread priority to BACKGROUND");
android.os.Process.setThreadPriority(Process.THREAD_PRIORITY_BACKGROUND);
}
}
waitForIdle();
// second step
if (loadWorkspaceFirst) {
if (DEBUG_LOADERS) Log.d(TAG, "step 2: loading all apps");
loadAndBindAllApps();
} else {
if (DEBUG_LOADERS) Log.d(TAG, "step 2: special: loading workspace");
loadAndBindWorkspace();
}
// Restore the default thread priority after we are done loading items
synchronized (mLock) {
android.os.Process.setThreadPriority(Process.THREAD_PRIORITY_DEFAULT);
}
}
// Update the saved icons if necessary
if (DEBUG_LOADERS) Log.d(TAG, "Comparing loaded icons to database icons");
for (Object key : sDbIconCache.keySet()) {
updateSavedIcon(mContext, (ShortcutInfo) key, sDbIconCache.get(key));
}
sDbIconCache.clear();
// Clear out this reference, otherwise we end up holding it until all of the
// callback runnables are done.
mContext = null;
synchronized (mLock) {
// If we are still the last one to be scheduled, remove ourselves.
if (mLoaderTask == this) {
mLoaderTask = null;
}
}
}
public void stopLocked() {
synchronized (LoaderTask.this) {
mStopped = true;
this.notify();
}
}
/**
* Gets the callbacks object. If we've been stopped, or if the launcher object
* has somehow been garbage collected, return null instead. Pass in the Callbacks
* object that was around when the deferred message was scheduled, and if there's
* a new Callbacks object around then also return null. This will save us from
* calling onto it with data that will be ignored.
*/
Callbacks tryGetCallbacks(Callbacks oldCallbacks) {
synchronized (mLock) {
if (mStopped) {
return null;
}
if (mCallbacks == null) {
return null;
}
final Callbacks callbacks = mCallbacks.get();
if (callbacks != oldCallbacks) {
return null;
}
if (callbacks == null) {
Log.w(TAG, "no mCallbacks");
return null;
}
return callbacks;
}
}
// check & update map of what's occupied; used to discard overlapping/invalid items
private boolean checkItemPlacement(ItemInfo occupied[][][], ItemInfo item) {
int containerIndex = item.screen;
if (item.container == LauncherSettings.Favorites.CONTAINER_HOTSEAT) {
// Return early if we detect that an item is under the hotseat button
Callbacks callbacks = mCallbacks == null ? null : mCallbacks.get();
if (callbacks == null || callbacks.isAllAppsButtonRank(item.screen)) {
return false;
}
// We use the last index to refer to the hotseat and the screen as the rank, so
// test and update the occupied state accordingly
if (occupied[Launcher.SCREEN_COUNT][item.screen][0] != null) {
Log.e(TAG, "Error loading shortcut into hotseat " + item
+ " into position (" + item.screen + ":" + item.cellX + "," + item.cellY
+ ") occupied by " + occupied[Launcher.SCREEN_COUNT][item.screen][0]);
return false;
} else {
occupied[Launcher.SCREEN_COUNT][item.screen][0] = item;
return true;
}
} else if (item.container != LauncherSettings.Favorites.CONTAINER_DESKTOP) {
// Skip further checking if it is not the hotseat or workspace container
return true;
}
// Check if any workspace icons overlap with each other
for (int x = item.cellX; x < (item.cellX+item.spanX); x++) {
for (int y = item.cellY; y < (item.cellY+item.spanY); y++) {
if (occupied[containerIndex][x][y] != null) {
Log.e(TAG, "Error loading shortcut " + item
+ " into cell (" + containerIndex + "-" + item.screen + ":"
+ x + "," + y
+ ") occupied by "
+ occupied[containerIndex][x][y]);
return false;
}
}
}
for (int x = item.cellX; x < (item.cellX+item.spanX); x++) {
for (int y = item.cellY; y < (item.cellY+item.spanY); y++) {
occupied[containerIndex][x][y] = item;
}
}
return true;
}
private void loadWorkspace() {
final long t = DEBUG_LOADERS ? SystemClock.uptimeMillis() : 0;
final Context context = mContext;
final ContentResolver contentResolver = context.getContentResolver();
final PackageManager manager = context.getPackageManager();
final AppWidgetManager widgets = AppWidgetManager.getInstance(context);
final boolean isSafeMode = manager.isSafeMode();
// Make sure the default workspace is loaded, if needed
mApp.getLauncherProvider().loadDefaultFavoritesIfNecessary();
sWorkspaceItems.clear();
sAppWidgets.clear();
sFolders.clear();
sItemsIdMap.clear();
sDbIconCache.clear();
final ArrayList<Long> itemsToRemove = new ArrayList<Long>();
final Cursor c = contentResolver.query(
LauncherSettings.Favorites.CONTENT_URI, null, null, null, null);
// +1 for the hotseat (it can be larger than the workspace)
// Load workspace in reverse order to ensure that latest items are loaded first (and
// before any earlier duplicates)
final ItemInfo occupied[][][] =
new ItemInfo[Launcher.SCREEN_COUNT + 1][mCellCountX + 1][mCellCountY + 1];
try {
final int idIndex = c.getColumnIndexOrThrow(LauncherSettings.Favorites._ID);
final int intentIndex = c.getColumnIndexOrThrow
(LauncherSettings.Favorites.INTENT);
final int titleIndex = c.getColumnIndexOrThrow
(LauncherSettings.Favorites.TITLE);
final int iconTypeIndex = c.getColumnIndexOrThrow(
LauncherSettings.Favorites.ICON_TYPE);
final int iconIndex = c.getColumnIndexOrThrow(LauncherSettings.Favorites.ICON);
final int iconPackageIndex = c.getColumnIndexOrThrow(
LauncherSettings.Favorites.ICON_PACKAGE);
final int iconResourceIndex = c.getColumnIndexOrThrow(
LauncherSettings.Favorites.ICON_RESOURCE);
final int containerIndex = c.getColumnIndexOrThrow(
LauncherSettings.Favorites.CONTAINER);
final int itemTypeIndex = c.getColumnIndexOrThrow(
LauncherSettings.Favorites.ITEM_TYPE);
final int appWidgetIdIndex = c.getColumnIndexOrThrow(
LauncherSettings.Favorites.APPWIDGET_ID);
final int screenIndex = c.getColumnIndexOrThrow(
LauncherSettings.Favorites.SCREEN);
final int cellXIndex = c.getColumnIndexOrThrow
(LauncherSettings.Favorites.CELLX);
final int cellYIndex = c.getColumnIndexOrThrow
(LauncherSettings.Favorites.CELLY);
final int spanXIndex = c.getColumnIndexOrThrow
(LauncherSettings.Favorites.SPANX);
final int spanYIndex = c.getColumnIndexOrThrow(
LauncherSettings.Favorites.SPANY);
//final int uriIndex = c.getColumnIndexOrThrow(LauncherSettings.Favorites.URI);
//final int displayModeIndex = c.getColumnIndexOrThrow(
// LauncherSettings.Favorites.DISPLAY_MODE);
ShortcutInfo info;
String intentDescription;
LauncherAppWidgetInfo appWidgetInfo;
int container;
long id;
Intent intent;
while (!mStopped && c.moveToNext()) {
try {
int itemType = c.getInt(itemTypeIndex);
switch (itemType) {
case LauncherSettings.Favorites.ITEM_TYPE_APPLICATION:
case LauncherSettings.Favorites.ITEM_TYPE_SHORTCUT:
intentDescription = c.getString(intentIndex);
try {
intent = Intent.parseUri(intentDescription, 0);
} catch (URISyntaxException e) {
continue;
}
if (itemType == LauncherSettings.Favorites.ITEM_TYPE_APPLICATION) {
info = getShortcutInfo(manager, intent, context, c, iconIndex,
titleIndex, mLabelCache);
} else {
info = getShortcutInfo(c, context, iconTypeIndex,
iconPackageIndex, iconResourceIndex, iconIndex,
titleIndex);
// App shortcuts that used to be automatically added to Launcher
// didn't always have the correct intent flags set, so do that here
if (intent.getAction().equals(Intent.ACTION_MAIN) &&
intent.getCategories().contains(Intent.CATEGORY_LAUNCHER)) {
intent.addFlags(
Intent.FLAG_ACTIVITY_NEW_TASK |
Intent.FLAG_ACTIVITY_RESET_TASK_IF_NEEDED);
}
}
if (info != null) {
info.intent = intent;
info.id = c.getLong(idIndex);
container = c.getInt(containerIndex);
info.container = container;
info.screen = c.getInt(screenIndex);
info.cellX = c.getInt(cellXIndex);
info.cellY = c.getInt(cellYIndex);
// check & update map of what's occupied
if (!checkItemPlacement(occupied, info)) {
break;
}
switch (container) {
case LauncherSettings.Favorites.CONTAINER_DESKTOP:
case LauncherSettings.Favorites.CONTAINER_HOTSEAT:
sWorkspaceItems.add(info);
break;
default:
// Item is in a user folder
FolderInfo folderInfo =
findOrMakeFolder(sFolders, container);
folderInfo.add(info);
break;
}
sItemsIdMap.put(info.id, info);
// now that we've loaded everything, re-save it with the
// icon in case it disappears somehow.
queueIconToBeChecked(sDbIconCache, info, c, iconIndex);
} else {
// Failed to load the shortcut, probably because the
// activity manager couldn't resolve it (maybe the app
// was uninstalled), or the db row was somehow screwed up.
// Delete it.
id = c.getLong(idIndex);
Log.e(TAG, "Error loading shortcut " + id + ", removing it");
contentResolver.delete(LauncherSettings.Favorites.getContentUri(
id, false), null, null);
}
break;
case LauncherSettings.Favorites.ITEM_TYPE_FOLDER:
id = c.getLong(idIndex);
FolderInfo folderInfo = findOrMakeFolder(sFolders, id);
folderInfo.title = c.getString(titleIndex);
folderInfo.id = id;
container = c.getInt(containerIndex);
folderInfo.container = container;
folderInfo.screen = c.getInt(screenIndex);
folderInfo.cellX = c.getInt(cellXIndex);
folderInfo.cellY = c.getInt(cellYIndex);
// check & update map of what's occupied
if (!checkItemPlacement(occupied, folderInfo)) {
break;
}
switch (container) {
case LauncherSettings.Favorites.CONTAINER_DESKTOP:
case LauncherSettings.Favorites.CONTAINER_HOTSEAT:
sWorkspaceItems.add(folderInfo);
break;
}
sItemsIdMap.put(folderInfo.id, folderInfo);
sFolders.put(folderInfo.id, folderInfo);
break;
case LauncherSettings.Favorites.ITEM_TYPE_APPWIDGET:
// Read all Launcher-specific widget details
int appWidgetId = c.getInt(appWidgetIdIndex);
id = c.getLong(idIndex);
final AppWidgetProviderInfo provider =
widgets.getAppWidgetInfo(appWidgetId);
if (!isSafeMode && (provider == null || provider.provider == null ||
provider.provider.getPackageName() == null)) {
String log = "Deleting widget that isn't installed anymore: id="
+ id + " appWidgetId=" + appWidgetId;
Log.e(TAG, log);
Launcher.sDumpLogs.add(log);
itemsToRemove.add(id);
} else {
appWidgetInfo = new LauncherAppWidgetInfo(appWidgetId,
provider.provider);
appWidgetInfo.id = id;
appWidgetInfo.screen = c.getInt(screenIndex);
appWidgetInfo.cellX = c.getInt(cellXIndex);
appWidgetInfo.cellY = c.getInt(cellYIndex);
appWidgetInfo.spanX = c.getInt(spanXIndex);
appWidgetInfo.spanY = c.getInt(spanYIndex);
int[] minSpan = Launcher.getMinSpanForWidget(context, provider);
appWidgetInfo.minSpanX = minSpan[0];
appWidgetInfo.minSpanY = minSpan[1];
container = c.getInt(containerIndex);
if (container != LauncherSettings.Favorites.CONTAINER_DESKTOP &&
container != LauncherSettings.Favorites.CONTAINER_HOTSEAT) {
Log.e(TAG, "Widget found where container "
+ "!= CONTAINER_DESKTOP nor CONTAINER_HOTSEAT - ignoring!");
continue;
}
appWidgetInfo.container = c.getInt(containerIndex);
// check & update map of what's occupied
if (!checkItemPlacement(occupied, appWidgetInfo)) {
break;
}
sItemsIdMap.put(appWidgetInfo.id, appWidgetInfo);
sAppWidgets.add(appWidgetInfo);
}
break;
}
} catch (Exception e) {
Log.w(TAG, "Desktop items loading interrupted:", e);
}
}
} finally {
c.close();
}
if (itemsToRemove.size() > 0) {
ContentProviderClient client = contentResolver.acquireContentProviderClient(
LauncherSettings.Favorites.CONTENT_URI);
// Remove dead items
for (long id : itemsToRemove) {
if (DEBUG_LOADERS) {
Log.d(TAG, "Removed id = " + id);
}
// Don't notify content observers
try {
client.delete(LauncherSettings.Favorites.getContentUri(id, false),
null, null);
} catch (RemoteException e) {
Log.w(TAG, "Could not remove id = " + id);
}
}
}
if (DEBUG_LOADERS) {
Log.d(TAG, "loaded workspace in " + (SystemClock.uptimeMillis()-t) + "ms");
Log.d(TAG, "workspace layout: ");
for (int y = 0; y < mCellCountY; y++) {
String line = "";
for (int s = 0; s < Launcher.SCREEN_COUNT; s++) {
if (s > 0) {
line += " | ";
}
for (int x = 0; x < mCellCountX; x++) {
line += ((occupied[s][x][y] != null) ? "#" : ".");
}
}
Log.d(TAG, "[ " + line + " ]");
}
}
}
/**
     * Bind the items loaded by loadWorkspace to the launcher UI.
*/
private void bindWorkspace() {
final long t = SystemClock.uptimeMillis();
// Don't use these two variables in any of the callback runnables.
// Otherwise we hold a reference to them.
final Callbacks oldCallbacks = mCallbacks.get();
if (oldCallbacks == null) {
// This launcher has exited and nobody bothered to tell us. Just bail.
Log.w(TAG, "LoaderTask running with no launcher");
return;
}
// Get the list of workspace items to load and unbind the existing ShortcutInfos
// before we call startBinding() below.
final int currentScreen = oldCallbacks.getCurrentWorkspaceScreen();
final ArrayList<ItemInfo> tmpWorkspaceItems = unbindWorkspaceItemsOnMainThread();
// Order the items for loading as follows: current workspace, hotseat, everything else
Collections.sort(tmpWorkspaceItems, new Comparator<ItemInfo>() {
@Override
public int compare(ItemInfo lhs, ItemInfo rhs) {
int cellCountX = LauncherModel.getCellCountX();
int cellCountY = LauncherModel.getCellCountY();
int screenOffset = cellCountX * cellCountY;
int containerOffset = screenOffset * (Launcher.SCREEN_COUNT + 1); // +1 hotseat
long lr = (lhs.container * containerOffset + lhs.screen * screenOffset +
lhs.cellY * cellCountX + lhs.cellX);
long rr = (rhs.container * containerOffset + rhs.screen * screenOffset +
rhs.cellY * cellCountX + rhs.cellX);
return (int) (lr - rr);
}
});
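            /*
             * A worked sketch of the ordering key above, with hypothetical values
             * (cellCountX = 4, cellCountY = 4, Launcher.SCREEN_COUNT = 5):
             *
             *   screenOffset    = 4 * 4        = 16
             *   containerOffset = 16 * (5 + 1) = 96
             *   desktop (-100), screen 0, cell (1,2): -100*96 + 0*16 + 2*4 + 1 = -9591
             *   desktop (-100), screen 1, cell (0,0): -100*96 + 1*16 + 0*4 + 0 = -9584
             *
             * Lower keys sort first, so earlier screens come first and the hotseat
             * container (-101) sorts before every desktop item.
             */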
// Precondition: the items are ordered by page, screen
final ArrayList<ItemInfo> workspaceItems = new ArrayList<ItemInfo>();
for (ItemInfo ii : tmpWorkspaceItems) {
// Prepend the current items, hotseat items, append everything else
if (ii.container == LauncherSettings.Favorites.CONTAINER_DESKTOP &&
ii.screen == currentScreen) {
workspaceItems.add(0, ii);
} else if (ii.container == LauncherSettings.Favorites.CONTAINER_HOTSEAT) {
workspaceItems.add(0, ii);
} else {
workspaceItems.add(ii);
}
}
// Tell the workspace that we're about to start firing items at it
mHandler.post(new Runnable() {
public void run() {
Callbacks callbacks = tryGetCallbacks(oldCallbacks);
if (callbacks != null) {
callbacks.startBinding();
}
}
});
// Add the items to the workspace.
int N = workspaceItems.size();
for (int i=0; i<N; i+=ITEMS_CHUNK) {
final int start = i;
final int chunkSize = (i+ITEMS_CHUNK <= N) ? ITEMS_CHUNK : (N-i);
mHandler.post(new Runnable() {
public void run() {
Callbacks callbacks = tryGetCallbacks(oldCallbacks);
if (callbacks != null) {
callbacks.bindItems(workspaceItems, start, start+chunkSize);
}
}
});
}
// Ensure that we don't use the same folders data structure on the main thread
final HashMap<Long, FolderInfo> folders = new HashMap<Long, FolderInfo>(sFolders);
mHandler.post(new Runnable() {
public void run() {
Callbacks callbacks = tryGetCallbacks(oldCallbacks);
if (callbacks != null) {
callbacks.bindFolders(folders);
}
}
});
// Wait until the queue goes empty.
mHandler.post(new Runnable() {
public void run() {
if (DEBUG_LOADERS) {
Log.d(TAG, "Going to start binding widgets soon.");
}
}
});
// Bind the widgets, one at a time.
// WARNING: this is calling into the workspace from the background thread,
// but since getCurrentScreen() just returns the int, we should be okay. This
// is just a hint for the order, and if it's wrong, we'll be okay.
// TODO: instead, we should have that push the current screen into here.
N = sAppWidgets.size();
// once for the current screen
for (int i=0; i<N; i++) {
final LauncherAppWidgetInfo widget = sAppWidgets.get(i);
if (widget.screen == currentScreen) {
mHandler.post(new Runnable() {
public void run() {
Callbacks callbacks = tryGetCallbacks(oldCallbacks);
if (callbacks != null) {
callbacks.bindAppWidget(widget);
}
}
});
}
}
// once for the other screens
for (int i=0; i<N; i++) {
final LauncherAppWidgetInfo widget = sAppWidgets.get(i);
if (widget.screen != currentScreen) {
mHandler.post(new Runnable() {
public void run() {
Callbacks callbacks = tryGetCallbacks(oldCallbacks);
if (callbacks != null) {
callbacks.bindAppWidget(widget);
}
}
});
}
}
// Tell the workspace that we're done.
mHandler.post(new Runnable() {
public void run() {
Callbacks callbacks = tryGetCallbacks(oldCallbacks);
if (callbacks != null) {
callbacks.finishBindingItems();
}
}
});
// Cleanup
mHandler.post(new Runnable() {
public void run() {
// If we're profiling, ensure this is the last thing in the queue.
if (DEBUG_LOADERS) {
Log.d(TAG, "bound workspace in "
+ (SystemClock.uptimeMillis()-t) + "ms");
}
mIsLoadingAndBindingWorkspace = false;
}
});
}
private void loadAndBindAllApps() {
if (DEBUG_LOADERS) {
Log.d(TAG, "loadAndBindAllApps mAllAppsLoaded=" + mAllAppsLoaded);
}
if (!mAllAppsLoaded) {
loadAllAppsByBatch();
synchronized (LoaderTask.this) {
if (mStopped) {
return;
}
mAllAppsLoaded = true;
}
} else {
onlyBindAllApps();
}
}
private void onlyBindAllApps() {
final Callbacks oldCallbacks = mCallbacks.get();
if (oldCallbacks == null) {
// This launcher has exited and nobody bothered to tell us. Just bail.
Log.w(TAG, "LoaderTask running with no launcher (onlyBindAllApps)");
return;
}
// shallow copy
@SuppressWarnings("unchecked")
final ArrayList<ApplicationInfo> list
= (ArrayList<ApplicationInfo>) mAllAppsList.data.clone();
mHandler.post(new Runnable() {
public void run() {
final long t = SystemClock.uptimeMillis();
final Callbacks callbacks = tryGetCallbacks(oldCallbacks);
if (callbacks != null) {
callbacks.bindAllApplications(list);
}
if (DEBUG_LOADERS) {
Log.d(TAG, "bound all " + list.size() + " apps from cache in "
+ (SystemClock.uptimeMillis()-t) + "ms");
}
}
});
}
private void loadAllAppsByBatch() {
final long t = DEBUG_LOADERS ? SystemClock.uptimeMillis() : 0;
// Don't use these two variables in any of the callback runnables.
// Otherwise we hold a reference to them.
final Callbacks oldCallbacks = mCallbacks.get();
if (oldCallbacks == null) {
// This launcher has exited and nobody bothered to tell us. Just bail.
Log.w(TAG, "LoaderTask running with no launcher (loadAllAppsByBatch)");
return;
}
final Intent mainIntent = new Intent(Intent.ACTION_MAIN, null);
mainIntent.addCategory(Intent.CATEGORY_LAUNCHER);
final PackageManager packageManager = mContext.getPackageManager();
List<ResolveInfo> apps = null;
int N = Integer.MAX_VALUE;
int startIndex;
int i=0;
int batchSize = -1;
while (i < N && !mStopped) {
if (i == 0) {
mAllAppsList.clear();
final long qiaTime = DEBUG_LOADERS ? SystemClock.uptimeMillis() : 0;
apps = packageManager.queryIntentActivities(mainIntent, 0);
if (DEBUG_LOADERS) {
Log.d(TAG, "queryIntentActivities took "
+ (SystemClock.uptimeMillis()-qiaTime) + "ms");
}
if (apps == null) {
return;
}
N = apps.size();
if (DEBUG_LOADERS) {
Log.d(TAG, "queryIntentActivities got " + N + " apps");
}
if (N == 0) {
// There are no apps?!?
return;
}
if (mBatchSize == 0) {
batchSize = N;
} else {
batchSize = mBatchSize;
}
final long sortTime = DEBUG_LOADERS ? SystemClock.uptimeMillis() : 0;
Collections.sort(apps,
new LauncherModel.ShortcutNameComparator(packageManager, mLabelCache));
if (DEBUG_LOADERS) {
Log.d(TAG, "sort took "
+ (SystemClock.uptimeMillis()-sortTime) + "ms");
}
}
final long t2 = DEBUG_LOADERS ? SystemClock.uptimeMillis() : 0;
startIndex = i;
for (int j=0; i<N && j<batchSize; j++) {
// This builds the icon bitmaps.
mAllAppsList.add(new ApplicationInfo(packageManager, apps.get(i),
mIconCache, mLabelCache));
i++;
}
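                /*
                 * Batch arithmetic sketch, assuming hypothetical values N = 100 and
                 * mBatchSize = 16: the loop above fills at most 16 entries per outer
                 * iteration, producing six full batches and a final batch of 4, i.e.
                 * seven bindAllApplications/bindAppsAdded posts in total.
                 */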
final boolean first = i <= batchSize;
final Callbacks callbacks = tryGetCallbacks(oldCallbacks);
final ArrayList<ApplicationInfo> added = mAllAppsList.added;
mAllAppsList.added = new ArrayList<ApplicationInfo>();
mHandler.post(new Runnable() {
public void run() {
final long t = SystemClock.uptimeMillis();
if (callbacks != null) {
if (first) {
callbacks.bindAllApplications(added);
} else {
callbacks.bindAppsAdded(added);
}
if (DEBUG_LOADERS) {
Log.d(TAG, "bound " + added.size() + " apps in "
+ (SystemClock.uptimeMillis() - t) + "ms");
}
} else {
Log.i(TAG, "not binding apps: no Launcher activity");
}
}
});
if (DEBUG_LOADERS) {
Log.d(TAG, "batch of " + (i-startIndex) + " icons processed in "
+ (SystemClock.uptimeMillis()-t2) + "ms");
}
if (mAllAppsLoadDelay > 0 && i < N) {
try {
if (DEBUG_LOADERS) {
Log.d(TAG, "sleeping for " + mAllAppsLoadDelay + "ms");
}
Thread.sleep(mAllAppsLoadDelay);
} catch (InterruptedException exc) { }
}
}
if (DEBUG_LOADERS) {
Log.d(TAG, "cached all " + N + " apps in "
+ (SystemClock.uptimeMillis()-t) + "ms"
+ (mAllAppsLoadDelay > 0 ? " (including delay)" : ""));
}
}
public void dumpState() {
Log.d(TAG, "mLoaderTask.mContext=" + mContext);
Log.d(TAG, "mLoaderTask.mWaitThread=" + mWaitThread);
Log.d(TAG, "mLoaderTask.mIsLaunching=" + mIsLaunching);
Log.d(TAG, "mLoaderTask.mStopped=" + mStopped);
Log.d(TAG, "mLoaderTask.mLoadAndBindStepFinished=" + mLoadAndBindStepFinished);
Log.d(TAG, "mItems size=" + sWorkspaceItems.size());
}
}
void enqueuePackageUpdated(PackageUpdatedTask task) {
sWorker.post(task);
}
private class PackageUpdatedTask implements Runnable {
int mOp;
String[] mPackages;
public static final int OP_NONE = 0;
public static final int OP_ADD = 1;
public static final int OP_UPDATE = 2;
        public static final int OP_REMOVE = 3; // uninstalled
public static final int OP_UNAVAILABLE = 4; // external media unmounted
public PackageUpdatedTask(int op, String[] packages) {
mOp = op;
mPackages = packages;
}
public void run() {
final Context context = mApp;
final String[] packages = mPackages;
final int N = packages.length;
switch (mOp) {
case OP_ADD:
for (int i=0; i<N; i++) {
if (DEBUG_LOADERS) Log.d(TAG, "mAllAppsList.addPackage " + packages[i]);
mAllAppsList.addPackage(context, packages[i]);
}
break;
case OP_UPDATE:
for (int i=0; i<N; i++) {
if (DEBUG_LOADERS) Log.d(TAG, "mAllAppsList.updatePackage " + packages[i]);
mAllAppsList.updatePackage(context, packages[i]);
}
break;
case OP_REMOVE:
case OP_UNAVAILABLE:
for (int i=0; i<N; i++) {
if (DEBUG_LOADERS) Log.d(TAG, "mAllAppsList.removePackage " + packages[i]);
mAllAppsList.removePackage(packages[i]);
}
break;
}
ArrayList<ApplicationInfo> added = null;
ArrayList<ApplicationInfo> removed = null;
ArrayList<ApplicationInfo> modified = null;
if (mAllAppsList.added.size() > 0) {
added = mAllAppsList.added;
mAllAppsList.added = new ArrayList<ApplicationInfo>();
}
if (mAllAppsList.removed.size() > 0) {
removed = mAllAppsList.removed;
mAllAppsList.removed = new ArrayList<ApplicationInfo>();
for (ApplicationInfo info: removed) {
mIconCache.remove(info.intent.getComponent());
}
}
if (mAllAppsList.modified.size() > 0) {
modified = mAllAppsList.modified;
mAllAppsList.modified = new ArrayList<ApplicationInfo>();
}
final Callbacks callbacks = mCallbacks != null ? mCallbacks.get() : null;
if (callbacks == null) {
Log.w(TAG, "Nobody to tell about the new app. Launcher is probably loading.");
return;
}
if (added != null) {
final ArrayList<ApplicationInfo> addedFinal = added;
mHandler.post(new Runnable() {
public void run() {
Callbacks cb = mCallbacks != null ? mCallbacks.get() : null;
if (callbacks == cb && cb != null) {
callbacks.bindAppsAdded(addedFinal);
}
}
});
}
if (modified != null) {
final ArrayList<ApplicationInfo> modifiedFinal = modified;
mHandler.post(new Runnable() {
public void run() {
Callbacks cb = mCallbacks != null ? mCallbacks.get() : null;
if (callbacks == cb && cb != null) {
callbacks.bindAppsUpdated(modifiedFinal);
}
}
});
}
if (removed != null) {
final boolean permanent = mOp != OP_UNAVAILABLE;
final ArrayList<ApplicationInfo> removedFinal = removed;
mHandler.post(new Runnable() {
public void run() {
Callbacks cb = mCallbacks != null ? mCallbacks.get() : null;
if (callbacks == cb && cb != null) {
callbacks.bindAppsRemoved(removedFinal, permanent);
}
}
});
}
mHandler.post(new Runnable() {
@Override
public void run() {
Callbacks cb = mCallbacks != null ? mCallbacks.get() : null;
if (callbacks == cb && cb != null) {
callbacks.bindPackagesUpdated();
}
}
});
}
}
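    /*
     * Minimal usage sketch (hypothetical package name): callers react to a package
     * broadcast by enqueueing the work on the worker thread instead of touching
     * mAllAppsList directly:
     *
     *   enqueuePackageUpdated(new PackageUpdatedTask(
     *           PackageUpdatedTask.OP_ADD, new String[] { "com.example.app" }));
     */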
/**
* Returns all the Workspace ShortcutInfos associated with a particular package.
     * @param packageName the package whose shortcuts should be returned
     * @return the matching workspace ShortcutInfos
*/
ArrayList<ShortcutInfo> getShortcutInfosForPackage(String packageName) {
ArrayList<ShortcutInfo> infos = new ArrayList<ShortcutInfo>();
for (ItemInfo i : sWorkspaceItems) {
if (i instanceof ShortcutInfo) {
ShortcutInfo info = (ShortcutInfo) i;
if (packageName.equals(info.getPackageName())) {
infos.add(info);
}
}
}
return infos;
}
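    /*
     * Usage sketch (hypothetical package name): this is handy when a package
     * changes and its workspace shortcuts need to be refreshed or removed:
     *
     *   ArrayList<ShortcutInfo> affected =
     *           getShortcutInfosForPackage("com.example.app");
     */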
/**
     * This is called from the code that adds shortcuts from the intent receiver. This
     * doesn't have a Cursor, so it delegates to the Cursor-based overload with
     * placeholder arguments.
*/
public ShortcutInfo getShortcutInfo(PackageManager manager, Intent intent, Context context) {
return getShortcutInfo(manager, intent, context, null, -1, -1, null);
}
/**
     * Make a ShortcutInfo object for a shortcut that is an application.
*
* If c is not null, then it will be used to fill in missing data like the title and icon.
*/
public ShortcutInfo getShortcutInfo(PackageManager manager, Intent intent, Context context,
Cursor c, int iconIndex, int titleIndex, HashMap<Object, CharSequence> labelCache) {
Bitmap icon = null;
final ShortcutInfo info = new ShortcutInfo();
ComponentName componentName = intent.getComponent();
if (componentName == null) {
return null;
}
try {
PackageInfo pi = manager.getPackageInfo(componentName.getPackageName(), 0);
if (!pi.applicationInfo.enabled) {
// If we return null here, the corresponding item will be removed from the launcher
// db and will not appear in the workspace.
return null;
}
} catch (NameNotFoundException e) {
Log.d(TAG, "getPackInfo failed for package " + componentName.getPackageName());
}
// TODO: See if the PackageManager knows about this case. If it doesn't
// then return null & delete this.
// the resource -- This may implicitly give us back the fallback icon,
// but don't worry about that. All we're doing with usingFallbackIcon is
// to avoid saving lots of copies of that in the database, and most apps
// have icons anyway.
        // Attempt to use queryIntentActivities to get the ResolveInfo (with IntentFilter info) and
        // if that fails, or is ambiguous, fall back to the standard way of getting the resolve info
        // via resolveActivity().
ResolveInfo resolveInfo = null;
ComponentName oldComponent = intent.getComponent();
Intent newIntent = new Intent(intent.getAction(), null);
newIntent.addCategory(Intent.CATEGORY_LAUNCHER);
newIntent.setPackage(oldComponent.getPackageName());
List<ResolveInfo> infos = manager.queryIntentActivities(newIntent, 0);
for (ResolveInfo i : infos) {
ComponentName cn = new ComponentName(i.activityInfo.packageName,
i.activityInfo.name);
if (cn.equals(oldComponent)) {
resolveInfo = i;
}
}
if (resolveInfo == null) {
resolveInfo = manager.resolveActivity(intent, 0);
}
if (resolveInfo != null) {
icon = mIconCache.getIcon(componentName, resolveInfo, labelCache);
}
// the db
if (icon == null) {
if (c != null) {
icon = getIconFromCursor(c, iconIndex, context);
}
}
// the fallback icon
if (icon == null) {
icon = getFallbackIcon();
info.usingFallbackIcon = true;
}
info.setIcon(icon);
// from the resource
if (resolveInfo != null) {
ComponentName key = LauncherModel.getComponentNameFromResolveInfo(resolveInfo);
if (labelCache != null && labelCache.containsKey(key)) {
info.title = labelCache.get(key);
} else {
info.title = resolveInfo.activityInfo.loadLabel(manager);
if (labelCache != null) {
labelCache.put(key, info.title);
}
}
}
// from the db
if (info.title == null) {
if (c != null) {
info.title = c.getString(titleIndex);
}
}
// fall back to the class name of the activity
if (info.title == null) {
info.title = componentName.getClassName();
}
info.itemType = LauncherSettings.Favorites.ITEM_TYPE_APPLICATION;
return info;
}
/**
     * Make a ShortcutInfo object for a shortcut that isn't an application.
*/
private ShortcutInfo getShortcutInfo(Cursor c, Context context,
int iconTypeIndex, int iconPackageIndex, int iconResourceIndex, int iconIndex,
int titleIndex) {
Bitmap icon = null;
final ShortcutInfo info = new ShortcutInfo();
info.itemType = LauncherSettings.Favorites.ITEM_TYPE_SHORTCUT;
// TODO: If there's an explicit component and we can't install that, delete it.
info.title = c.getString(titleIndex);
int iconType = c.getInt(iconTypeIndex);
switch (iconType) {
case LauncherSettings.Favorites.ICON_TYPE_RESOURCE:
String packageName = c.getString(iconPackageIndex);
String resourceName = c.getString(iconResourceIndex);
PackageManager packageManager = context.getPackageManager();
info.customIcon = false;
// the resource
try {
Resources resources = packageManager.getResourcesForApplication(packageName);
if (resources != null) {
final int id = resources.getIdentifier(resourceName, null, null);
icon = Utilities.createIconBitmap(
mIconCache.getFullResIcon(resources, id), context);
}
} catch (Exception e) {
// drop this. we have other places to look for icons
}
// the db
if (icon == null) {
icon = getIconFromCursor(c, iconIndex, context);
}
// the fallback icon
if (icon == null) {
icon = getFallbackIcon();
info.usingFallbackIcon = true;
}
break;
case LauncherSettings.Favorites.ICON_TYPE_BITMAP:
icon = getIconFromCursor(c, iconIndex, context);
if (icon == null) {
icon = getFallbackIcon();
info.customIcon = false;
info.usingFallbackIcon = true;
} else {
info.customIcon = true;
}
break;
default:
icon = getFallbackIcon();
info.usingFallbackIcon = true;
info.customIcon = false;
break;
}
info.setIcon(icon);
return info;
}
Bitmap getIconFromCursor(Cursor c, int iconIndex, Context context) {
@SuppressWarnings("all") // suppress dead code warning
final boolean debug = false;
if (debug) {
Log.d(TAG, "getIconFromCursor app="
+ c.getString(c.getColumnIndexOrThrow(LauncherSettings.Favorites.TITLE)));
}
byte[] data = c.getBlob(iconIndex);
try {
return Utilities.createIconBitmap(
BitmapFactory.decodeByteArray(data, 0, data.length), context);
} catch (Exception e) {
return null;
}
}
ShortcutInfo addShortcut(Context context, Intent data, long container, int screen,
int cellX, int cellY, boolean notify) {
final ShortcutInfo info = infoFromShortcutIntent(context, data, null);
if (info == null) {
return null;
}
addItemToDatabase(context, info, container, screen, cellX, cellY, notify);
return info;
}
/**
* Attempts to find an AppWidgetProviderInfo that matches the given component.
*/
AppWidgetProviderInfo findAppWidgetProviderInfoWithComponent(Context context,
ComponentName component) {
List<AppWidgetProviderInfo> widgets =
AppWidgetManager.getInstance(context).getInstalledProviders();
for (AppWidgetProviderInfo info : widgets) {
if (info.provider.equals(component)) {
return info;
}
}
return null;
}
/**
* Returns a list of all the widgets that can handle configuration with a particular mimeType.
*/
List<WidgetMimeTypeHandlerData> resolveWidgetsForMimeType(Context context, String mimeType) {
final PackageManager packageManager = context.getPackageManager();
final List<WidgetMimeTypeHandlerData> supportedConfigurationActivities =
new ArrayList<WidgetMimeTypeHandlerData>();
final Intent supportsIntent =
new Intent(InstallWidgetReceiver.ACTION_SUPPORTS_CLIPDATA_MIMETYPE);
supportsIntent.setType(mimeType);
// Create a set of widget configuration components that we can test against
final List<AppWidgetProviderInfo> widgets =
AppWidgetManager.getInstance(context).getInstalledProviders();
final HashMap<ComponentName, AppWidgetProviderInfo> configurationComponentToWidget =
new HashMap<ComponentName, AppWidgetProviderInfo>();
for (AppWidgetProviderInfo info : widgets) {
configurationComponentToWidget.put(info.configure, info);
}
// Run through each of the intents that can handle this type of clip data, and cross
// reference them with the components that are actual configuration components
final List<ResolveInfo> activities = packageManager.queryIntentActivities(supportsIntent,
PackageManager.MATCH_DEFAULT_ONLY);
for (ResolveInfo info : activities) {
final ActivityInfo activityInfo = info.activityInfo;
final ComponentName infoComponent = new ComponentName(activityInfo.packageName,
activityInfo.name);
if (configurationComponentToWidget.containsKey(infoComponent)) {
supportedConfigurationActivities.add(
new InstallWidgetReceiver.WidgetMimeTypeHandlerData(info,
configurationComponentToWidget.get(infoComponent)));
}
}
return supportedConfigurationActivities;
}
ShortcutInfo infoFromShortcutIntent(Context context, Intent data, Bitmap fallbackIcon) {
Intent intent = data.getParcelableExtra(Intent.EXTRA_SHORTCUT_INTENT);
String name = data.getStringExtra(Intent.EXTRA_SHORTCUT_NAME);
Parcelable bitmap = data.getParcelableExtra(Intent.EXTRA_SHORTCUT_ICON);
if (intent == null) {
// If the intent is null, we can't construct a valid ShortcutInfo, so we return null
Log.e(TAG, "Can't construct ShorcutInfo with null intent");
return null;
}
Bitmap icon = null;
boolean customIcon = false;
ShortcutIconResource iconResource = null;
if (bitmap != null && bitmap instanceof Bitmap) {
icon = Utilities.createIconBitmap(new FastBitmapDrawable((Bitmap)bitmap), context);
customIcon = true;
} else {
Parcelable extra = data.getParcelableExtra(Intent.EXTRA_SHORTCUT_ICON_RESOURCE);
if (extra != null && extra instanceof ShortcutIconResource) {
try {
iconResource = (ShortcutIconResource) extra;
final PackageManager packageManager = context.getPackageManager();
Resources resources = packageManager.getResourcesForApplication(
iconResource.packageName);
final int id = resources.getIdentifier(iconResource.resourceName, null, null);
icon = Utilities.createIconBitmap(
mIconCache.getFullResIcon(resources, id), context);
} catch (Exception e) {
Log.w(TAG, "Could not load shortcut icon: " + extra);
}
}
}
final ShortcutInfo info = new ShortcutInfo();
if (icon == null) {
if (fallbackIcon != null) {
icon = fallbackIcon;
} else {
icon = getFallbackIcon();
info.usingFallbackIcon = true;
}
}
info.setIcon(icon);
info.title = name;
info.intent = intent;
info.customIcon = customIcon;
info.iconResource = iconResource;
return info;
}
boolean queueIconToBeChecked(HashMap<Object, byte[]> cache, ShortcutInfo info, Cursor c,
int iconIndex) {
// If apps can't be on SD, don't even bother.
if (!mAppsCanBeOnExternalStorage) {
return false;
}
// If this icon doesn't have a custom icon, check to see
// what's stored in the DB, and if it doesn't match what
// we're going to show, store what we are going to show back
// into the DB. We do this so when we're loading, if the
// package manager can't find an icon (for example because
// the app is on SD) then we can use that instead.
if (!info.customIcon && !info.usingFallbackIcon) {
cache.put(info, c.getBlob(iconIndex));
return true;
}
return false;
}
void updateSavedIcon(Context context, ShortcutInfo info, byte[] data) {
boolean needSave = false;
try {
if (data != null) {
Bitmap saved = BitmapFactory.decodeByteArray(data, 0, data.length);
Bitmap loaded = info.getIcon(mIconCache);
needSave = !saved.sameAs(loaded);
} else {
needSave = true;
}
} catch (Exception e) {
needSave = true;
}
if (needSave) {
Log.d(TAG, "going to save icon bitmap for info=" + info);
// This is slower than is ideal, but this only happens once
// or when the app is updated with a new icon.
updateItemInDatabase(context, info);
}
}
/**
* Return an existing FolderInfo object if we have encountered this ID previously,
* or make a new one.
*/
private static FolderInfo findOrMakeFolder(HashMap<Long, FolderInfo> folders, long id) {
// See if a placeholder was created for us already
FolderInfo folderInfo = folders.get(id);
if (folderInfo == null) {
// No placeholder -- create a new instance
folderInfo = new FolderInfo();
folders.put(id, folderInfo);
}
return folderInfo;
}
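    /*
     * On Java 8+ the placeholder pattern above collapses to a one-liner; a sketch,
     * assuming a newer language level than this code targets:
     *
     *   FolderInfo folderInfo = folders.computeIfAbsent(id, k -> new FolderInfo());
     */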
private static final Collator sCollator = Collator.getInstance();
public static final Comparator<ApplicationInfo> APP_NAME_COMPARATOR
= new Comparator<ApplicationInfo>() {
public final int compare(ApplicationInfo a, ApplicationInfo b) {
int result = sCollator.compare(a.title.toString(), b.title.toString());
if (result == 0) {
result = a.componentName.compareTo(b.componentName);
}
return result;
}
};
public static final Comparator<ApplicationInfo> APP_INSTALL_TIME_COMPARATOR
= new Comparator<ApplicationInfo>() {
        public final int compare(ApplicationInfo a, ApplicationInfo b) {
            // Sort newest installs first: descending order of firstInstallTime
            if (a.firstInstallTime < b.firstInstallTime) return 1;
if (a.firstInstallTime > b.firstInstallTime) return -1;
return 0;
}
};
public static final Comparator<AppWidgetProviderInfo> WIDGET_NAME_COMPARATOR
= new Comparator<AppWidgetProviderInfo>() {
public final int compare(AppWidgetProviderInfo a, AppWidgetProviderInfo b) {
return sCollator.compare(a.label.toString(), b.label.toString());
}
};
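    /*
     * Usage sketch: these comparators plug straight into Collections.sort, e.g.
     * to order a List<ApplicationInfo> alphabetically by label:
     *
     *   Collections.sort(apps, LauncherModel.APP_NAME_COMPARATOR);
     */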
static ComponentName getComponentNameFromResolveInfo(ResolveInfo info) {
if (info.activityInfo != null) {
return new ComponentName(info.activityInfo.packageName, info.activityInfo.name);
} else {
return new ComponentName(info.serviceInfo.packageName, info.serviceInfo.name);
}
}
public static class ShortcutNameComparator implements Comparator<ResolveInfo> {
private PackageManager mPackageManager;
private HashMap<Object, CharSequence> mLabelCache;
ShortcutNameComparator(PackageManager pm) {
mPackageManager = pm;
mLabelCache = new HashMap<Object, CharSequence>();
}
ShortcutNameComparator(PackageManager pm, HashMap<Object, CharSequence> labelCache) {
mPackageManager = pm;
mLabelCache = labelCache;
}
public final int compare(ResolveInfo a, ResolveInfo b) {
CharSequence labelA, labelB;
ComponentName keyA = LauncherModel.getComponentNameFromResolveInfo(a);
ComponentName keyB = LauncherModel.getComponentNameFromResolveInfo(b);
if (mLabelCache.containsKey(keyA)) {
labelA = mLabelCache.get(keyA);
} else {
labelA = a.loadLabel(mPackageManager).toString();
mLabelCache.put(keyA, labelA);
}
if (mLabelCache.containsKey(keyB)) {
labelB = mLabelCache.get(keyB);
} else {
labelB = b.loadLabel(mPackageManager).toString();
mLabelCache.put(keyB, labelB);
}
return sCollator.compare(labelA, labelB);
}
};
public static class WidgetAndShortcutNameComparator implements Comparator<Object> {
private PackageManager mPackageManager;
private HashMap<Object, String> mLabelCache;
WidgetAndShortcutNameComparator(PackageManager pm) {
mPackageManager = pm;
mLabelCache = new HashMap<Object, String>();
}
public final int compare(Object a, Object b) {
String labelA, labelB;
if (mLabelCache.containsKey(a)) {
labelA = mLabelCache.get(a);
} else {
labelA = (a instanceof AppWidgetProviderInfo) ?
((AppWidgetProviderInfo) a).label :
((ResolveInfo) a).loadLabel(mPackageManager).toString();
mLabelCache.put(a, labelA);
}
if (mLabelCache.containsKey(b)) {
labelB = mLabelCache.get(b);
} else {
labelB = (b instanceof AppWidgetProviderInfo) ?
((AppWidgetProviderInfo) b).label :
((ResolveInfo) b).loadLabel(mPackageManager).toString();
mLabelCache.put(b, labelB);
}
return sCollator.compare(labelA, labelB);
}
};
public void dumpState() {
Log.d(TAG, "mCallbacks=" + mCallbacks);
ApplicationInfo.dumpApplicationInfoList(TAG, "mAllAppsList.data", mAllAppsList.data);
ApplicationInfo.dumpApplicationInfoList(TAG, "mAllAppsList.added", mAllAppsList.added);
ApplicationInfo.dumpApplicationInfoList(TAG, "mAllAppsList.removed", mAllAppsList.removed);
ApplicationInfo.dumpApplicationInfoList(TAG, "mAllAppsList.modified", mAllAppsList.modified);
if (mLoaderTask != null) {
mLoaderTask.dumpState();
} else {
Log.d(TAG, "mLoaderTask=null");
}
}
}
    static boolean shortcutExists(Context context, String title, Intent intent) {
final ContentResolver cr = context.getContentResolver();
Cursor c = cr.query(LauncherSettings.Favorites.CONTENT_URI,
new String[] { "title", "intent" }, "title=? and intent=?",
new String[] { title, intent.toUri(0) }, null);
boolean result = false;
try {
result = c.moveToFirst();
} finally {
c.close();
}
return result;
}
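    /*
     * Usage sketch (hypothetical title and intent): shortcut installers can call
     * this first to avoid writing duplicate rows:
     *
     *   if (!shortcutExists(context, "Example", shortcutIntent)) {
     *       // safe to insert the new shortcut
     *   }
     */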
/**
* Returns an ItemInfo array containing all the items in the LauncherModel.
* The ItemInfo.id is not set through this function.
*/
static ArrayList<ItemInfo> getItemsInLocalCoordinates(Context context) {
ArrayList<ItemInfo> items = new ArrayList<ItemInfo>();
final ContentResolver cr = context.getContentResolver();
Cursor c = cr.query(LauncherSettings.Favorites.CONTENT_URI, new String[] {
LauncherSettings.Favorites.ITEM_TYPE, LauncherSettings.Favorites.CONTAINER,
LauncherSettings.Favorites.SCREEN, LauncherSettings.Favorites.CELLX, LauncherSettings.Favorites.CELLY,
LauncherSettings.Favorites.SPANX, LauncherSettings.Favorites.SPANY }, null, null, null);
final int itemTypeIndex = c.getColumnIndexOrThrow(LauncherSettings.Favorites.ITEM_TYPE);
final int containerIndex = c.getColumnIndexOrThrow(LauncherSettings.Favorites.CONTAINER);
final int screenIndex = c.getColumnIndexOrThrow(LauncherSettings.Favorites.SCREEN);
final int cellXIndex = c.getColumnIndexOrThrow(LauncherSettings.Favorites.CELLX);
final int cellYIndex = c.getColumnIndexOrThrow(LauncherSettings.Favorites.CELLY);
final int spanXIndex = c.getColumnIndexOrThrow(LauncherSettings.Favorites.SPANX);
final int spanYIndex = c.getColumnIndexOrThrow(LauncherSettings.Favorites.SPANY);
try {
while (c.moveToNext()) {
ItemInfo item = new ItemInfo();
item.cellX = c.getInt(cellXIndex);
item.cellY = c.getInt(cellYIndex);
item.spanX = c.getInt(spanXIndex);
item.spanY = c.getInt(spanYIndex);
item.container = c.getInt(containerIndex);
item.itemType = c.getInt(itemTypeIndex);
item.screen = c.getInt(screenIndex);
items.add(item);
}
} catch (Exception e) {
items.clear();
} finally {
c.close();
}
return items;
}
/**
* Find a folder in the db, creating the FolderInfo if necessary, and adding it to folderList.
*/
FolderInfo getFolderById(Context context, HashMap<Long,FolderInfo> folderList, long id) {
final ContentResolver cr = context.getContentResolver();
Cursor c = cr.query(LauncherSettings.Favorites.CONTENT_URI, null,
"_id=? and (itemType=? or itemType=?)",
new String[] { String.valueOf(id),
String.valueOf(LauncherSettings.Favorites.ITEM_TYPE_FOLDER)}, null);
try {
if (c.moveToFirst()) {
final int itemTypeIndex = c.getColumnIndexOrThrow(LauncherSettings.Favorites.ITEM_TYPE);
final int titleIndex = c.getColumnIndexOrThrow(LauncherSettings.Favorites.TITLE);
final int containerIndex = c.getColumnIndexOrThrow(LauncherSettings.Favorites.CONTAINER);
final int screenIndex = c.getColumnIndexOrThrow(LauncherSettings.Favorites.SCREEN);
final int cellXIndex = c.getColumnIndexOrThrow(LauncherSettings.Favorites.CELLX);
final int cellYIndex = c.getColumnIndexOrThrow(LauncherSettings.Favorites.CELLY);
FolderInfo folderInfo = null;
switch (c.getInt(itemTypeIndex)) {
case LauncherSettings.Favorites.ITEM_TYPE_FOLDER:
folderInfo = findOrMakeFolder(folderList, id);
break;
}
folderInfo.title = c.getString(titleIndex);
folderInfo.id = id;
folderInfo.container = c.getInt(containerIndex);
folderInfo.screen = c.getInt(screenIndex);
folderInfo.cellX = c.getInt(cellXIndex);
folderInfo.cellY = c.getInt(cellYIndex);
return folderInfo;
}
} finally {
c.close();
}
return null;
}
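    /*
     * Note on the query above: each '?' in the selection must consume exactly one
     * entry of the selectionArgs array, so "_id=? and itemType=?" pairs with
     * { String.valueOf(id), String.valueOf(ITEM_TYPE_FOLDER) }. A mismatched
     * placeholder count makes SQLite reject the query at runtime.
     */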
/**
* Add an item to the database in a specified container. Sets the container, screen, cellX and
* cellY fields of the item. Also assigns an ID to the item.
*/
static void addItemToDatabase(Context context, final ItemInfo item, final long container,
final int screen, final int cellX, final int cellY, final boolean notify) {
item.container = container;
item.cellX = cellX;
item.cellY = cellY;
        // Hotseat items are stored in canonical form: an orientation-invariant rank
        // within the hotseat rather than a raw (cellX, cellY) position
if (context instanceof Launcher && screen < 0 &&
container == LauncherSettings.Favorites.CONTAINER_HOTSEAT) {
item.screen = ((Launcher) context).getHotseat().getOrderInHotseat(cellX, cellY);
} else {
item.screen = screen;
}
final ContentValues values = new ContentValues();
final ContentResolver cr = context.getContentResolver();
item.onAddToDatabase(values);
LauncherApplication app = (LauncherApplication) context.getApplicationContext();
item.id = app.getLauncherProvider().generateNewId();
values.put(LauncherSettings.Favorites._ID, item.id);
item.updateValuesWithCoordinates(values, item.cellX, item.cellY);
Runnable r = new Runnable() {
public void run() {
cr.insert(notify ? LauncherSettings.Favorites.CONTENT_URI :
LauncherSettings.Favorites.CONTENT_URI_NO_NOTIFICATION, values);
if (sItemsIdMap.containsKey(item.id)) {
// we should not be adding new items in the db with the same id
throw new RuntimeException("Error: ItemInfo id (" + item.id + ") passed to " +
"addItemToDatabase already exists." + item.toString());
}
sItemsIdMap.put(item.id, item);
switch (item.itemType) {
case LauncherSettings.Favorites.ITEM_TYPE_FOLDER:
sFolders.put(item.id, (FolderInfo) item);
// Fall through
case LauncherSettings.Favorites.ITEM_TYPE_APPLICATION:
case LauncherSettings.Favorites.ITEM_TYPE_SHORTCUT:
if (item.container == LauncherSettings.Favorites.CONTAINER_DESKTOP ||
item.container == LauncherSettings.Favorites.CONTAINER_HOTSEAT) {
sWorkspaceItems.add(item);
}
break;
case LauncherSettings.Favorites.ITEM_TYPE_APPWIDGET:
sAppWidgets.add((LauncherAppWidgetInfo) item);
break;
}
}
};
if (sWorkerThread.getThreadId() == Process.myTid()) {
r.run();
} else {
sWorker.post(r);
}
}
/**
* Creates a new unique child id, for a given cell span across all layouts.
*/
static int getCellLayoutChildId(
long container, int screen, int localCellX, int localCellY, int spanX, int spanY) {
return (((int) container & 0xFF) << 24)
| (screen & 0xFF) << 16 | (localCellX & 0xFF) << 8 | (localCellY & 0xFF);
}
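    /*
     * Worked sketch of the packing above, with hypothetical values: for
     * container = -100 (CONTAINER_DESKTOP), screen = 2, localCellX = 3 and
     * localCellY = 1:
     *
     *   (-100 & 0xFF) = 0x9C, so the id is
     *   (0x9C << 24) | (2 << 16) | (3 << 8) | 1 = 0x9C020301
     *
     * The spanX/spanY parameters are accepted but unused in the computation.
     */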
static int getCellCountX() {
return mCellCountX;
}
static int getCellCountY() {
return mCellCountY;
}
/**
* Updates the model orientation helper to take into account the current layout dimensions
* when performing local/canonical coordinate transformations.
*/
static void updateWorkspaceLayoutCells(int shortAxisCellCount, int longAxisCellCount) {
mCellCountX = shortAxisCellCount;
mCellCountY = longAxisCellCount;
}
/**
* Removes the specified item from the database
* @param context
* @param item
*/
static void deleteItemFromDatabase(Context context, final ItemInfo item) {
final ContentResolver cr = context.getContentResolver();
final Uri uriToDelete = LauncherSettings.Favorites.getContentUri(item.id, false);
Runnable r = new Runnable() {
public void run() {
cr.delete(uriToDelete, null, null);
switch (item.itemType) {
case LauncherSettings.Favorites.ITEM_TYPE_FOLDER:
sFolders.remove(item.id);
sWorkspaceItems.remove(item);
break;
case LauncherSettings.Favorites.ITEM_TYPE_APPLICATION:
case LauncherSettings.Favorites.ITEM_TYPE_SHORTCUT:
sWorkspaceItems.remove(item);
break;
case LauncherSettings.Favorites.ITEM_TYPE_APPWIDGET:
sAppWidgets.remove((LauncherAppWidgetInfo) item);
break;
}
sItemsIdMap.remove(item.id);
sDbIconCache.remove(item);
}
};
if (sWorkerThread.getThreadId() == Process.myTid()) {
r.run();
} else {
sWorker.post(r);
}
}
/**
* Remove the contents of the specified folder from the database
*/
static void deleteFolderContentsFromDatabase(Context context, final FolderInfo info) {
final ContentResolver cr = context.getContentResolver();
Runnable r = new Runnable() {
public void run() {
cr.delete(LauncherSettings.Favorites.getContentUri(info.id, false), null, null);
sItemsIdMap.remove(info.id);
sFolders.remove(info.id);
sDbIconCache.remove(info);
sWorkspaceItems.remove(info);
cr.delete(LauncherSettings.Favorites.CONTENT_URI_NO_NOTIFICATION,
LauncherSettings.Favorites.CONTAINER + "=" + info.id, null);
for (ItemInfo childInfo : info.contents) {
sItemsIdMap.remove(childInfo.id);
sDbIconCache.remove(childInfo);
}
}
};
if (sWorkerThread.getThreadId() == Process.myTid()) {
r.run();
} else {
sWorker.post(r);
}
}
/**
* Set this as the current Launcher activity object for the loader.
*/
public void initialize(Callbacks callbacks) {
synchronized (mLock) {
mCallbacks = new WeakReference<Callbacks>(callbacks);
}
}
/**
* Call from the handler for ACTION_PACKAGE_ADDED, ACTION_PACKAGE_REMOVED and
* ACTION_PACKAGE_CHANGED.
*/
@Override
public void onReceive(Context context, Intent intent) {
if (DEBUG_LOADERS) Log.d(TAG, "onReceive intent=" + intent);
final String action = intent.getAction();
if (Intent.ACTION_PACKAGE_CHANGED.equals(action)
|| Intent.ACTION_PACKAGE_REMOVED.equals(action)
|| Intent.ACTION_PACKAGE_ADDED.equals(action)) {
final String packageName = intent.getData().getSchemeSpecificPart();
final boolean replacing = intent.getBooleanExtra(Intent.EXTRA_REPLACING, false);
int op = PackageUpdatedTask.OP_NONE;
if (packageName == null || packageName.length() == 0) {
// they sent us a bad intent
return;
}
if (Intent.ACTION_PACKAGE_CHANGED.equals(action)) {
op = PackageUpdatedTask.OP_UPDATE;
} else if (Intent.ACTION_PACKAGE_REMOVED.equals(action)) {
if (!replacing) {
op = PackageUpdatedTask.OP_REMOVE;
}
// else, we are replacing the package, so a PACKAGE_ADDED will be sent
// later, we will update the package at this time
} else if (Intent.ACTION_PACKAGE_ADDED.equals(action)) {
if (!replacing) {
op = PackageUpdatedTask.OP_ADD;
} else {
op = PackageUpdatedTask.OP_UPDATE;
}
}
if (op != PackageUpdatedTask.OP_NONE) {
enqueuePackageUpdated(new PackageUpdatedTask(op, new String[] { packageName }));
}
} else if (Intent.ACTION_EXTERNAL_APPLICATIONS_AVAILABLE.equals(action)) {
// First, schedule to add these apps back in.
String[] packages = intent.getStringArrayExtra(Intent.EXTRA_CHANGED_PACKAGE_LIST);
enqueuePackageUpdated(new PackageUpdatedTask(PackageUpdatedTask.OP_ADD, packages));
// Then, rebind everything.
startLoaderFromBackground();
} else if (Intent.ACTION_EXTERNAL_APPLICATIONS_UNAVAILABLE.equals(action)) {
String[] packages = intent.getStringArrayExtra(Intent.EXTRA_CHANGED_PACKAGE_LIST);
enqueuePackageUpdated(new PackageUpdatedTask(
PackageUpdatedTask.OP_UNAVAILABLE, packages));
} else if (Intent.ACTION_LOCALE_CHANGED.equals(action)) {
// If we have changed locale we need to clear out the labels in all apps/workspace.
forceReload();
} else if (Intent.ACTION_CONFIGURATION_CHANGED.equals(action)) {
// Check if configuration change was an mcc/mnc change which would affect app resources
// and we would need to clear out the labels in all apps/workspace. Same handling as
// above for ACTION_LOCALE_CHANGED
Configuration currentConfig = context.getResources().getConfiguration();
if (mPreviousConfigMcc != currentConfig.mcc) {
Log.d(TAG, "Reload apps on config change. curr_mcc:"
+ currentConfig.mcc + " prevmcc:" + mPreviousConfigMcc);
forceReload();
}
// Update previousConfig
mPreviousConfigMcc = currentConfig.mcc;
} else if (SearchManager.INTENT_GLOBAL_SEARCH_ACTIVITY_CHANGED.equals(action) ||
SearchManager.INTENT_ACTION_SEARCHABLES_CHANGED.equals(action)) {
if (mCallbacks != null) {
Callbacks callbacks = mCallbacks.get();
if (callbacks != null) {
callbacks.bindSearchablesChanged();
}
}
}
}
private void forceReload() {
resetLoadedState(true, true);
// Do this here because if the launcher activity is running it will be restarted.
// If it's not running startLoaderFromBackground will merely tell it that it needs
// to reload.
startLoaderFromBackground();
}
public void resetLoadedState(boolean resetAllAppsLoaded, boolean resetWorkspaceLoaded) {
synchronized (mLock) {
// Stop any existing loaders first, so they don't set mAllAppsLoaded or
// mWorkspaceLoaded to true later
stopLoaderLocked();
if (resetAllAppsLoaded) mAllAppsLoaded = false;
if (resetWorkspaceLoaded) mWorkspaceLoaded = false;
}
}
/**
* When the launcher is in the background, it's possible for it to miss paired
* configuration changes. So whenever we trigger the loader from the background
* tell the launcher that it needs to re-run the loader when it comes back instead
* of doing it now.
*/
public void startLoaderFromBackground() {
boolean runLoader = false;
if (mCallbacks != null) {
Callbacks callbacks = mCallbacks.get();
if (callbacks != null) {
// Only actually run the loader if they're not paused.
if (!callbacks.setLoadOnResume()) {
runLoader = true;
}
}
}
if (runLoader) {
startLoader(false);
}
}
// If there is already a loader task running, tell it to stop.
// returns true if isLaunching() was true on the old task
private boolean stopLoaderLocked() {
boolean isLaunching = false;
LoaderTask oldTask = mLoaderTask;
if (oldTask != null) {
if (oldTask.isLaunching()) {
isLaunching = true;
}
oldTask.stopLocked();
}
return isLaunching;
}
public void startLoader(boolean isLaunching) {
synchronized (mLock) {
if (DEBUG_LOADERS) {
Log.d(TAG, "startLoader isLaunching=" + isLaunching);
}
// Don't bother to start the thread if we know it's not going to do anything
if (mCallbacks != null && mCallbacks.get() != null) {
// If there is already one running, tell it to stop.
// also, don't downgrade isLaunching if we're already running
isLaunching = isLaunching || stopLoaderLocked();
mLoaderTask = new LoaderTask(mApp, isLaunching);
sWorkerThread.setPriority(Thread.NORM_PRIORITY);
sWorker.post(mLoaderTask);
}
}
}
public void stopLoader() {
synchronized (mLock) {
if (mLoaderTask != null) {
mLoaderTask.stopLocked();
}
}
}
public boolean isAllAppsLoaded() {
return mAllAppsLoaded;
}
boolean isLoadingWorkspace() {
synchronized (mLock) {
if (mLoaderTask != null) {
return mLoaderTask.isLoadingWorkspace();
}
}
return false;
}
/**
* Runnable for the thread that loads the contents of the launcher:
* - workspace icons
* - widgets
* - all apps icons
*/
private class LoaderTask implements Runnable {
private Context mContext;
private Thread mWaitThread;
private boolean mIsLaunching;
private boolean mIsLoadingAndBindingWorkspace;
private boolean mStopped;
private boolean mLoadAndBindStepFinished;
private HashMap<Object, CharSequence> mLabelCache;
LoaderTask(Context context, boolean isLaunching) {
mContext = context;
mIsLaunching = isLaunching;
mLabelCache = new HashMap<Object, CharSequence>();
}
boolean isLaunching() {
return mIsLaunching;
}
boolean isLoadingWorkspace() {
return mIsLoadingAndBindingWorkspace;
}
private void loadAndBindWorkspace() {
mIsLoadingAndBindingWorkspace = true;
// Load the workspace
if (DEBUG_LOADERS) {
Log.d(TAG, "loadAndBindWorkspace mWorkspaceLoaded=" + mWorkspaceLoaded);
}
if (!mWorkspaceLoaded) {
loadWorkspace();
synchronized (LoaderTask.this) {
if (mStopped) {
return;
}
mWorkspaceLoaded = true;
}
}
// Bind the workspace
bindWorkspace();
}
private void waitForIdle() {
            // Wait until either we're stopped or the other threads are done.
// This way we don't start loading all apps until the workspace has settled
// down.
synchronized (LoaderTask.this) {
final long workspaceWaitTime = DEBUG_LOADERS ? SystemClock.uptimeMillis() : 0;
mHandler.postIdle(new Runnable() {
public void run() {
synchronized (LoaderTask.this) {
mLoadAndBindStepFinished = true;
if (DEBUG_LOADERS) {
Log.d(TAG, "done with previous binding step");
}
LoaderTask.this.notify();
}
}
});
while (!mStopped && !mLoadAndBindStepFinished) {
try {
this.wait();
} catch (InterruptedException ex) {
// Ignore
}
}
if (DEBUG_LOADERS) {
Log.d(TAG, "waited "
+ (SystemClock.uptimeMillis()-workspaceWaitTime)
+ "ms for previous step to finish binding");
}
}
}
public void run() {
            // Optimize for end-user experience: if the Launcher is up and running with the
// All Apps interface in the foreground, load All Apps first. Otherwise, load the
// workspace first (default).
final Callbacks cbk = mCallbacks.get();
final boolean loadWorkspaceFirst = cbk != null ? (!cbk.isAllAppsVisible()) : true;
keep_running: {
// Elevate priority when Home launches for the first time to avoid
// starving at boot time. Staring at a blank home is not cool.
synchronized (mLock) {
if (DEBUG_LOADERS) Log.d(TAG, "Setting thread priority to " +
(mIsLaunching ? "DEFAULT" : "BACKGROUND"));
android.os.Process.setThreadPriority(mIsLaunching
? Process.THREAD_PRIORITY_DEFAULT : Process.THREAD_PRIORITY_BACKGROUND);
}
if (loadWorkspaceFirst) {
if (DEBUG_LOADERS) Log.d(TAG, "step 1: loading workspace");
loadAndBindWorkspace();
} else {
if (DEBUG_LOADERS) Log.d(TAG, "step 1: special: loading all apps");
loadAndBindAllApps();
}
if (mStopped) {
break keep_running;
}
// Whew! Hard work done. Slow us down, and wait until the UI thread has
// settled down.
synchronized (mLock) {
if (mIsLaunching) {
if (DEBUG_LOADERS) Log.d(TAG, "Setting thread priority to BACKGROUND");
android.os.Process.setThreadPriority(Process.THREAD_PRIORITY_BACKGROUND);
}
}
waitForIdle();
// second step
if (loadWorkspaceFirst) {
if (DEBUG_LOADERS) Log.d(TAG, "step 2: loading all apps");
loadAndBindAllApps();
} else {
if (DEBUG_LOADERS) Log.d(TAG, "step 2: special: loading workspace");
loadAndBindWorkspace();
}
// Restore the default thread priority after we are done loading items
synchronized (mLock) {
android.os.Process.setThreadPriority(Process.THREAD_PRIORITY_DEFAULT);
}
}
// Update the saved icons if necessary
if (DEBUG_LOADERS) Log.d(TAG, "Comparing loaded icons to database icons");
for (Object key : sDbIconCache.keySet()) {
updateSavedIcon(mContext, (ShortcutInfo) key, sDbIconCache.get(key));
}
sDbIconCache.clear();
// Clear out this reference, otherwise we end up holding it until all of the
// callback runnables are done.
mContext = null;
synchronized (mLock) {
// If we are still the last one to be scheduled, remove ourselves.
if (mLoaderTask == this) {
mLoaderTask = null;
}
}
}
public void stopLocked() {
synchronized (LoaderTask.this) {
mStopped = true;
this.notify();
}
}
/**
* Gets the callbacks object. If we've been stopped, or if the launcher object
* has somehow been garbage collected, return null instead. Pass in the Callbacks
* object that was around when the deferred message was scheduled, and if there's
* a new Callbacks object around then also return null. This will save us from
* calling onto it with data that will be ignored.
*/
Callbacks tryGetCallbacks(Callbacks oldCallbacks) {
synchronized (mLock) {
if (mStopped) {
return null;
}
if (mCallbacks == null) {
return null;
}
final Callbacks callbacks = mCallbacks.get();
if (callbacks != oldCallbacks) {
return null;
}
if (callbacks == null) {
Log.w(TAG, "no mCallbacks");
return null;
}
return callbacks;
}
}
// check & update map of what's occupied; used to discard overlapping/invalid items
private boolean checkItemPlacement(ItemInfo occupied[][][], ItemInfo item) {
int containerIndex = item.screen;
if (item.container == LauncherSettings.Favorites.CONTAINER_HOTSEAT) {
// Return early if we detect that an item is under the hotseat button
if (mCallbacks == null || mCallbacks.get().isAllAppsButtonRank(item.screen)) {
return false;
}
// We use the last index to refer to the hotseat and the screen as the rank, so
// test and update the occupied state accordingly
if (occupied[Launcher.SCREEN_COUNT][item.screen][0] != null) {
Log.e(TAG, "Error loading shortcut into hotseat " + item
+ " into position (" + item.screen + ":" + item.cellX + "," + item.cellY
+ ") occupied by " + occupied[Launcher.SCREEN_COUNT][item.screen][0]);
return false;
} else {
occupied[Launcher.SCREEN_COUNT][item.screen][0] = item;
return true;
}
} else if (item.container != LauncherSettings.Favorites.CONTAINER_DESKTOP) {
// Skip further checking if it is not the hotseat or workspace container
return true;
}
// Check if any workspace icons overlap with each other
for (int x = item.cellX; x < (item.cellX+item.spanX); x++) {
for (int y = item.cellY; y < (item.cellY+item.spanY); y++) {
if (occupied[containerIndex][x][y] != null) {
Log.e(TAG, "Error loading shortcut " + item
+ " into cell (" + containerIndex + "-" + item.screen + ":"
+ x + "," + y
+ ") occupied by "
+ occupied[containerIndex][x][y]);
return false;
}
}
}
for (int x = item.cellX; x < (item.cellX+item.spanX); x++) {
for (int y = item.cellY; y < (item.cellY+item.spanY); y++) {
occupied[containerIndex][x][y] = item;
}
}
return true;
}
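        /*
         * Indexing sketch for the occupied grid, assuming Launcher.SCREEN_COUNT = 5:
         * indices 0..4 are workspace screens addressed as [screen][cellX][cellY],
         * and index 5 is the hotseat, addressed as [5][rank][0] with the rank
         * stored in item.screen. A 1x1 desktop item on screen 1 at cell (2,3)
         * therefore claims occupied[1][2][3] and nothing else.
         */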
private void loadWorkspace() {
final long t = DEBUG_LOADERS ? SystemClock.uptimeMillis() : 0;
final Context context = mContext;
final ContentResolver contentResolver = context.getContentResolver();
final PackageManager manager = context.getPackageManager();
final AppWidgetManager widgets = AppWidgetManager.getInstance(context);
final boolean isSafeMode = manager.isSafeMode();
// Make sure the default workspace is loaded, if needed
mApp.getLauncherProvider().loadDefaultFavoritesIfNecessary();
sWorkspaceItems.clear();
sAppWidgets.clear();
sFolders.clear();
sItemsIdMap.clear();
sDbIconCache.clear();
final ArrayList<Long> itemsToRemove = new ArrayList<Long>();
final Cursor c = contentResolver.query(
LauncherSettings.Favorites.CONTENT_URI, null, null, null, null);
// +1 for the hotseat (it can be larger than the workspace)
            // Items are checked against this grid as they load; when two items collide,
            // the one loaded first keeps its spot (see checkItemPlacement)
final ItemInfo occupied[][][] =
new ItemInfo[Launcher.SCREEN_COUNT + 1][mCellCountX + 1][mCellCountY + 1];
try {
final int idIndex = c.getColumnIndexOrThrow(LauncherSettings.Favorites._ID);
final int intentIndex = c.getColumnIndexOrThrow
(LauncherSettings.Favorites.INTENT);
final int titleIndex = c.getColumnIndexOrThrow
(LauncherSettings.Favorites.TITLE);
final int iconTypeIndex = c.getColumnIndexOrThrow(
LauncherSettings.Favorites.ICON_TYPE);
final int iconIndex = c.getColumnIndexOrThrow(LauncherSettings.Favorites.ICON);
final int iconPackageIndex = c.getColumnIndexOrThrow(
LauncherSettings.Favorites.ICON_PACKAGE);
final int iconResourceIndex = c.getColumnIndexOrThrow(
LauncherSettings.Favorites.ICON_RESOURCE);
final int containerIndex = c.getColumnIndexOrThrow(
LauncherSettings.Favorites.CONTAINER);
final int itemTypeIndex = c.getColumnIndexOrThrow(
LauncherSettings.Favorites.ITEM_TYPE);
final int appWidgetIdIndex = c.getColumnIndexOrThrow(
LauncherSettings.Favorites.APPWIDGET_ID);
final int screenIndex = c.getColumnIndexOrThrow(
LauncherSettings.Favorites.SCREEN);
final int cellXIndex = c.getColumnIndexOrThrow
(LauncherSettings.Favorites.CELLX);
final int cellYIndex = c.getColumnIndexOrThrow
(LauncherSettings.Favorites.CELLY);
final int spanXIndex = c.getColumnIndexOrThrow
(LauncherSettings.Favorites.SPANX);
final int spanYIndex = c.getColumnIndexOrThrow(
LauncherSettings.Favorites.SPANY);
//final int uriIndex = c.getColumnIndexOrThrow(LauncherSettings.Favorites.URI);
//final int displayModeIndex = c.getColumnIndexOrThrow(
// LauncherSettings.Favorites.DISPLAY_MODE);
ShortcutInfo info;
String intentDescription;
LauncherAppWidgetInfo appWidgetInfo;
int container;
long id;
Intent intent;
while (!mStopped && c.moveToNext()) {
try {
int itemType = c.getInt(itemTypeIndex);
switch (itemType) {
case LauncherSettings.Favorites.ITEM_TYPE_APPLICATION:
case LauncherSettings.Favorites.ITEM_TYPE_SHORTCUT:
intentDescription = c.getString(intentIndex);
try {
intent = Intent.parseUri(intentDescription, 0);
} catch (URISyntaxException e) {
continue;
}
if (itemType == LauncherSettings.Favorites.ITEM_TYPE_APPLICATION) {
info = getShortcutInfo(manager, intent, context, c, iconIndex,
titleIndex, mLabelCache);
} else {
info = getShortcutInfo(c, context, iconTypeIndex,
iconPackageIndex, iconResourceIndex, iconIndex,
titleIndex);
// App shortcuts that used to be automatically added to Launcher
// didn't always have the correct intent flags set, so do that here
if (intent.getAction() != null &&
intent.getCategories() != null &&
intent.getAction().equals(Intent.ACTION_MAIN) &&
intent.getCategories().contains(Intent.CATEGORY_LAUNCHER)) {
intent.addFlags(
Intent.FLAG_ACTIVITY_NEW_TASK |
Intent.FLAG_ACTIVITY_RESET_TASK_IF_NEEDED);
}
}
if (info != null) {
info.intent = intent;
info.id = c.getLong(idIndex);
container = c.getInt(containerIndex);
info.container = container;
info.screen = c.getInt(screenIndex);
info.cellX = c.getInt(cellXIndex);
info.cellY = c.getInt(cellYIndex);
// check & update map of what's occupied
if (!checkItemPlacement(occupied, info)) {
break;
}
switch (container) {
case LauncherSettings.Favorites.CONTAINER_DESKTOP:
case LauncherSettings.Favorites.CONTAINER_HOTSEAT:
sWorkspaceItems.add(info);
break;
default:
// Item is in a user folder
FolderInfo folderInfo =
findOrMakeFolder(sFolders, container);
folderInfo.add(info);
break;
}
sItemsIdMap.put(info.id, info);
                            // now that we've loaded everything, re-save it with the
                            // icon in case it disappears somehow.
queueIconToBeChecked(sDbIconCache, info, c, iconIndex);
} else {
// Failed to load the shortcut, probably because the
// activity manager couldn't resolve it (maybe the app
// was uninstalled), or the db row was somehow screwed up.
// Delete it.
id = c.getLong(idIndex);
Log.e(TAG, "Error loading shortcut " + id + ", removing it");
contentResolver.delete(LauncherSettings.Favorites.getContentUri(
id, false), null, null);
}
break;
case LauncherSettings.Favorites.ITEM_TYPE_FOLDER:
id = c.getLong(idIndex);
FolderInfo folderInfo = findOrMakeFolder(sFolders, id);
folderInfo.title = c.getString(titleIndex);
folderInfo.id = id;
container = c.getInt(containerIndex);
folderInfo.container = container;
folderInfo.screen = c.getInt(screenIndex);
folderInfo.cellX = c.getInt(cellXIndex);
folderInfo.cellY = c.getInt(cellYIndex);
// check & update map of what's occupied
if (!checkItemPlacement(occupied, folderInfo)) {
break;
}
switch (container) {
case LauncherSettings.Favorites.CONTAINER_DESKTOP:
case LauncherSettings.Favorites.CONTAINER_HOTSEAT:
sWorkspaceItems.add(folderInfo);
break;
}
sItemsIdMap.put(folderInfo.id, folderInfo);
sFolders.put(folderInfo.id, folderInfo);
break;
case LauncherSettings.Favorites.ITEM_TYPE_APPWIDGET:
// Read all Launcher-specific widget details
int appWidgetId = c.getInt(appWidgetIdIndex);
id = c.getLong(idIndex);
final AppWidgetProviderInfo provider =
widgets.getAppWidgetInfo(appWidgetId);
if (!isSafeMode && (provider == null || provider.provider == null ||
provider.provider.getPackageName() == null)) {
String log = "Deleting widget that isn't installed anymore: id="
+ id + " appWidgetId=" + appWidgetId;
Log.e(TAG, log);
Launcher.sDumpLogs.add(log);
itemsToRemove.add(id);
} else {
appWidgetInfo = new LauncherAppWidgetInfo(appWidgetId,
provider.provider);
appWidgetInfo.id = id;
appWidgetInfo.screen = c.getInt(screenIndex);
appWidgetInfo.cellX = c.getInt(cellXIndex);
appWidgetInfo.cellY = c.getInt(cellYIndex);
appWidgetInfo.spanX = c.getInt(spanXIndex);
appWidgetInfo.spanY = c.getInt(spanYIndex);
int[] minSpan = Launcher.getMinSpanForWidget(context, provider);
appWidgetInfo.minSpanX = minSpan[0];
appWidgetInfo.minSpanY = minSpan[1];
container = c.getInt(containerIndex);
if (container != LauncherSettings.Favorites.CONTAINER_DESKTOP &&
container != LauncherSettings.Favorites.CONTAINER_HOTSEAT) {
Log.e(TAG, "Widget found where container "
+ "!= CONTAINER_DESKTOP nor CONTAINER_HOTSEAT - ignoring!");
continue;
}
appWidgetInfo.container = c.getInt(containerIndex);
// check & update map of what's occupied
if (!checkItemPlacement(occupied, appWidgetInfo)) {
break;
}
sItemsIdMap.put(appWidgetInfo.id, appWidgetInfo);
sAppWidgets.add(appWidgetInfo);
}
break;
}
} catch (Exception e) {
Log.w(TAG, "Desktop items loading interrupted:", e);
}
}
} finally {
c.close();
}
if (itemsToRemove.size() > 0) {
ContentProviderClient client = contentResolver.acquireContentProviderClient(
LauncherSettings.Favorites.CONTENT_URI);
// Remove dead items
for (long id : itemsToRemove) {
if (DEBUG_LOADERS) {
Log.d(TAG, "Removed id = " + id);
}
// Don't notify content observers
try {
client.delete(LauncherSettings.Favorites.getContentUri(id, false),
null, null);
} catch (RemoteException e) {
Log.w(TAG, "Could not remove id = " + id);
}
}
}
if (DEBUG_LOADERS) {
Log.d(TAG, "loaded workspace in " + (SystemClock.uptimeMillis()-t) + "ms");
Log.d(TAG, "workspace layout: ");
for (int y = 0; y < mCellCountY; y++) {
String line = "";
for (int s = 0; s < Launcher.SCREEN_COUNT; s++) {
if (s > 0) {
line += " | ";
}
for (int x = 0; x < mCellCountX; x++) {
line += ((occupied[s][x][y] != null) ? "#" : ".");
}
}
Log.d(TAG, "[ " + line + " ]");
}
}
}
/**
* Read everything out of our database.
*/
private void bindWorkspace() {
final long t = SystemClock.uptimeMillis();
// Don't use these two variables in any of the callback runnables.
// Otherwise we hold a reference to them.
final Callbacks oldCallbacks = mCallbacks.get();
if (oldCallbacks == null) {
// This launcher has exited and nobody bothered to tell us. Just bail.
Log.w(TAG, "LoaderTask running with no launcher");
return;
}
// Get the list of workspace items to load and unbind the existing ShortcutInfos
// before we call startBinding() below.
final int currentScreen = oldCallbacks.getCurrentWorkspaceScreen();
final ArrayList<ItemInfo> tmpWorkspaceItems = unbindWorkspaceItemsOnMainThread();
// Order the items for loading as follows: current workspace, hotseat, everything else
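// (Illustrative: the comparator below collapses (container, screen, cellY,
// cellX) into a single scalar key. Assuming a 4x4 grid, screenOffset = 16,
// so an item at screen 1, cellX 2, cellY 3 contributes 1*16 + 3*4 + 2 = 30
// on top of its container offset.)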
Collections.sort(tmpWorkspaceItems, new Comparator<ItemInfo>() {
@Override
public int compare(ItemInfo lhs, ItemInfo rhs) {
int cellCountX = LauncherModel.getCellCountX();
int cellCountY = LauncherModel.getCellCountY();
int screenOffset = cellCountX * cellCountY;
int containerOffset = screenOffset * (Launcher.SCREEN_COUNT + 1); // +1 hotseat
long lr = (lhs.container * containerOffset + lhs.screen * screenOffset +
lhs.cellY * cellCountX + lhs.cellX);
long rr = (rhs.container * containerOffset + rhs.screen * screenOffset +
rhs.cellY * cellCountX + rhs.cellX);
return (int) (lr - rr);
}
});
// Precondition: the items are ordered by page, screen
final ArrayList<ItemInfo> workspaceItems = new ArrayList<ItemInfo>();
for (ItemInfo ii : tmpWorkspaceItems) {
// Prepend the current items, hotseat items, append everything else
if (ii.container == LauncherSettings.Favorites.CONTAINER_DESKTOP &&
ii.screen == currentScreen) {
workspaceItems.add(0, ii);
} else if (ii.container == LauncherSettings.Favorites.CONTAINER_HOTSEAT) {
workspaceItems.add(0, ii);
} else {
workspaceItems.add(ii);
}
}
// Tell the workspace that we're about to start firing items at it
mHandler.post(new Runnable() {
public void run() {
Callbacks callbacks = tryGetCallbacks(oldCallbacks);
if (callbacks != null) {
callbacks.startBinding();
}
}
});
// Add the items to the workspace.
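// (Items are posted in ITEMS_CHUNK-sized batches so that no single
// Runnable blocks the UI thread for long. Illustrative: with N = 7 and
// ITEMS_CHUNK = 3 this posts bindItems for [0,3), [3,6) and [6,7).)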
int N = workspaceItems.size();
for (int i=0; i<N; i+=ITEMS_CHUNK) {
final int start = i;
final int chunkSize = (i+ITEMS_CHUNK <= N) ? ITEMS_CHUNK : (N-i);
mHandler.post(new Runnable() {
public void run() {
Callbacks callbacks = tryGetCallbacks(oldCallbacks);
if (callbacks != null) {
callbacks.bindItems(workspaceItems, start, start+chunkSize);
}
}
});
}
// Ensure that we don't use the same folders data structure on the main thread
final HashMap<Long, FolderInfo> folders = new HashMap<Long, FolderInfo>(sFolders);
mHandler.post(new Runnable() {
public void run() {
Callbacks callbacks = tryGetCallbacks(oldCallbacks);
if (callbacks != null) {
callbacks.bindFolders(folders);
}
}
});
// Wait until the queue goes empty.
mHandler.post(new Runnable() {
public void run() {
if (DEBUG_LOADERS) {
Log.d(TAG, "Going to start binding widgets soon.");
}
}
});
// Bind the widgets, one at a time.
// WARNING: this is calling into the workspace from the background thread,
// but since getCurrentScreen() just returns the int, we should be okay. This
// is just a hint for the order, and if it's wrong, we'll be okay.
// TODO: instead, we should have that push the current screen into here.
N = sAppWidgets.size();
// once for the current screen
for (int i=0; i<N; i++) {
final LauncherAppWidgetInfo widget = sAppWidgets.get(i);
if (widget.screen == currentScreen) {
mHandler.post(new Runnable() {
public void run() {
Callbacks callbacks = tryGetCallbacks(oldCallbacks);
if (callbacks != null) {
callbacks.bindAppWidget(widget);
}
}
});
}
}
// once for the other screens
for (int i=0; i<N; i++) {
final LauncherAppWidgetInfo widget = sAppWidgets.get(i);
if (widget.screen != currentScreen) {
mHandler.post(new Runnable() {
public void run() {
Callbacks callbacks = tryGetCallbacks(oldCallbacks);
if (callbacks != null) {
callbacks.bindAppWidget(widget);
}
}
});
}
}
// Tell the workspace that we're done.
mHandler.post(new Runnable() {
public void run() {
Callbacks callbacks = tryGetCallbacks(oldCallbacks);
if (callbacks != null) {
callbacks.finishBindingItems();
}
}
});
// Cleanup
mHandler.post(new Runnable() {
public void run() {
// If we're profiling, ensure this is the last thing in the queue.
if (DEBUG_LOADERS) {
Log.d(TAG, "bound workspace in "
+ (SystemClock.uptimeMillis()-t) + "ms");
}
mIsLoadingAndBindingWorkspace = false;
}
});
}
private void loadAndBindAllApps() {
if (DEBUG_LOADERS) {
Log.d(TAG, "loadAndBindAllApps mAllAppsLoaded=" + mAllAppsLoaded);
}
if (!mAllAppsLoaded) {
loadAllAppsByBatch();
synchronized (LoaderTask.this) {
if (mStopped) {
return;
}
mAllAppsLoaded = true;
}
} else {
onlyBindAllApps();
}
}
private void onlyBindAllApps() {
final Callbacks oldCallbacks = mCallbacks.get();
if (oldCallbacks == null) {
// This launcher has exited and nobody bothered to tell us. Just bail.
Log.w(TAG, "LoaderTask running with no launcher (onlyBindAllApps)");
return;
}
// shallow copy
@SuppressWarnings("unchecked")
final ArrayList<ApplicationInfo> list
= (ArrayList<ApplicationInfo>) mAllAppsList.data.clone();
mHandler.post(new Runnable() {
public void run() {
final long t = SystemClock.uptimeMillis();
final Callbacks callbacks = tryGetCallbacks(oldCallbacks);
if (callbacks != null) {
callbacks.bindAllApplications(list);
}
if (DEBUG_LOADERS) {
Log.d(TAG, "bound all " + list.size() + " apps from cache in "
+ (SystemClock.uptimeMillis()-t) + "ms");
}
}
});
}
private void loadAllAppsByBatch() {
final long t = DEBUG_LOADERS ? SystemClock.uptimeMillis() : 0;
// Don't use these two variables in any of the callback runnables.
// Otherwise we hold a reference to them.
final Callbacks oldCallbacks = mCallbacks.get();
if (oldCallbacks == null) {
// This launcher has exited and nobody bothered to tell us. Just bail.
Log.w(TAG, "LoaderTask running with no launcher (loadAllAppsByBatch)");
return;
}
final Intent mainIntent = new Intent(Intent.ACTION_MAIN, null);
mainIntent.addCategory(Intent.CATEGORY_LAUNCHER);
final PackageManager packageManager = mContext.getPackageManager();
List<ResolveInfo> apps = null;
int N = Integer.MAX_VALUE;
int startIndex;
int i=0;
int batchSize = -1;
while (i < N && !mStopped) {
if (i == 0) {
mAllAppsList.clear();
final long qiaTime = DEBUG_LOADERS ? SystemClock.uptimeMillis() : 0;
apps = packageManager.queryIntentActivities(mainIntent, 0);
if (DEBUG_LOADERS) {
Log.d(TAG, "queryIntentActivities took "
+ (SystemClock.uptimeMillis()-qiaTime) + "ms");
}
if (apps == null) {
return;
}
N = apps.size();
if (DEBUG_LOADERS) {
Log.d(TAG, "queryIntentActivities got " + N + " apps");
}
if (N == 0) {
// There are no apps?!?
return;
}
if (mBatchSize == 0) {
batchSize = N;
} else {
batchSize = mBatchSize;
}
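// (Illustrative: mBatchSize == 0 means "publish everything in one batch";
// e.g. with N = 120 and mBatchSize = 25 the loop below publishes batches
// of 25, 25, 25, 25 and 20 apps.)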
final long sortTime = DEBUG_LOADERS ? SystemClock.uptimeMillis() : 0;
Collections.sort(apps,
new LauncherModel.ShortcutNameComparator(packageManager, mLabelCache));
if (DEBUG_LOADERS) {
Log.d(TAG, "sort took "
+ (SystemClock.uptimeMillis()-sortTime) + "ms");
}
}
final long t2 = DEBUG_LOADERS ? SystemClock.uptimeMillis() : 0;
startIndex = i;
for (int j=0; i<N && j<batchSize; j++) {
// This builds the icon bitmaps.
mAllAppsList.add(new ApplicationInfo(packageManager, apps.get(i),
mIconCache, mLabelCache));
i++;
}
final boolean first = i <= batchSize;
final Callbacks callbacks = tryGetCallbacks(oldCallbacks);
final ArrayList<ApplicationInfo> added = mAllAppsList.added;
mAllAppsList.added = new ArrayList<ApplicationInfo>();
mHandler.post(new Runnable() {
public void run() {
final long t = SystemClock.uptimeMillis();
if (callbacks != null) {
if (first) {
callbacks.bindAllApplications(added);
} else {
callbacks.bindAppsAdded(added);
}
if (DEBUG_LOADERS) {
Log.d(TAG, "bound " + added.size() + " apps in "
+ (SystemClock.uptimeMillis() - t) + "ms");
}
} else {
Log.i(TAG, "not binding apps: no Launcher activity");
}
}
});
if (DEBUG_LOADERS) {
Log.d(TAG, "batch of " + (i-startIndex) + " icons processed in "
+ (SystemClock.uptimeMillis()-t2) + "ms");
}
if (mAllAppsLoadDelay > 0 && i < N) {
try {
if (DEBUG_LOADERS) {
Log.d(TAG, "sleeping for " + mAllAppsLoadDelay + "ms");
}
Thread.sleep(mAllAppsLoadDelay);
} catch (InterruptedException exc) { }
}
}
if (DEBUG_LOADERS) {
Log.d(TAG, "cached all " + N + " apps in "
+ (SystemClock.uptimeMillis()-t) + "ms"
+ (mAllAppsLoadDelay > 0 ? " (including delay)" : ""));
}
}
public void dumpState() {
Log.d(TAG, "mLoaderTask.mContext=" + mContext);
Log.d(TAG, "mLoaderTask.mWaitThread=" + mWaitThread);
Log.d(TAG, "mLoaderTask.mIsLaunching=" + mIsLaunching);
Log.d(TAG, "mLoaderTask.mStopped=" + mStopped);
Log.d(TAG, "mLoaderTask.mLoadAndBindStepFinished=" + mLoadAndBindStepFinished);
Log.d(TAG, "mItems size=" + sWorkspaceItems.size());
}
}
void enqueuePackageUpdated(PackageUpdatedTask task) {
sWorker.post(task);
}
private class PackageUpdatedTask implements Runnable {
int mOp;
String[] mPackages;
public static final int OP_NONE = 0;
public static final int OP_ADD = 1;
public static final int OP_UPDATE = 2;
public static final int OP_REMOVE = 3; // uninstalled
public static final int OP_UNAVAILABLE = 4; // external media unmounted
public PackageUpdatedTask(int op, String[] packages) {
mOp = op;
mPackages = packages;
}
public void run() {
final Context context = mApp;
final String[] packages = mPackages;
final int N = packages.length;
switch (mOp) {
case OP_ADD:
for (int i=0; i<N; i++) {
if (DEBUG_LOADERS) Log.d(TAG, "mAllAppsList.addPackage " + packages[i]);
mAllAppsList.addPackage(context, packages[i]);
}
break;
case OP_UPDATE:
for (int i=0; i<N; i++) {
if (DEBUG_LOADERS) Log.d(TAG, "mAllAppsList.updatePackage " + packages[i]);
mAllAppsList.updatePackage(context, packages[i]);
}
break;
case OP_REMOVE:
case OP_UNAVAILABLE:
for (int i=0; i<N; i++) {
if (DEBUG_LOADERS) Log.d(TAG, "mAllAppsList.removePackage " + packages[i]);
mAllAppsList.removePackage(packages[i]);
}
break;
}
ArrayList<ApplicationInfo> added = null;
ArrayList<ApplicationInfo> removed = null;
ArrayList<ApplicationInfo> modified = null;
if (mAllAppsList.added.size() > 0) {
added = mAllAppsList.added;
mAllAppsList.added = new ArrayList<ApplicationInfo>();
}
if (mAllAppsList.removed.size() > 0) {
removed = mAllAppsList.removed;
mAllAppsList.removed = new ArrayList<ApplicationInfo>();
for (ApplicationInfo info: removed) {
mIconCache.remove(info.intent.getComponent());
}
}
if (mAllAppsList.modified.size() > 0) {
modified = mAllAppsList.modified;
mAllAppsList.modified = new ArrayList<ApplicationInfo>();
}
final Callbacks callbacks = mCallbacks != null ? mCallbacks.get() : null;
if (callbacks == null) {
Log.w(TAG, "Nobody to tell about the new app. Launcher is probably loading.");
return;
}
if (added != null) {
final ArrayList<ApplicationInfo> addedFinal = added;
mHandler.post(new Runnable() {
public void run() {
Callbacks cb = mCallbacks != null ? mCallbacks.get() : null;
if (callbacks == cb && cb != null) {
callbacks.bindAppsAdded(addedFinal);
}
}
});
}
if (modified != null) {
final ArrayList<ApplicationInfo> modifiedFinal = modified;
mHandler.post(new Runnable() {
public void run() {
Callbacks cb = mCallbacks != null ? mCallbacks.get() : null;
if (callbacks == cb && cb != null) {
callbacks.bindAppsUpdated(modifiedFinal);
}
}
});
}
if (removed != null) {
final boolean permanent = mOp != OP_UNAVAILABLE;
final ArrayList<ApplicationInfo> removedFinal = removed;
mHandler.post(new Runnable() {
public void run() {
Callbacks cb = mCallbacks != null ? mCallbacks.get() : null;
if (callbacks == cb && cb != null) {
callbacks.bindAppsRemoved(removedFinal, permanent);
}
}
});
}
mHandler.post(new Runnable() {
@Override
public void run() {
Callbacks cb = mCallbacks != null ? mCallbacks.get() : null;
if (callbacks == cb && cb != null) {
callbacks.bindPackagesUpdated();
}
}
});
}
}
/**
* Returns all the Workspace ShortcutInfos associated with a particular package.
* @param packageName the package whose shortcuts should be returned
* @return the matching workspace ShortcutInfos (empty list if none)
*/
ArrayList<ShortcutInfo> getShortcutInfosForPackage(String packageName) {
ArrayList<ShortcutInfo> infos = new ArrayList<ShortcutInfo>();
for (ItemInfo i : sWorkspaceItems) {
if (i instanceof ShortcutInfo) {
ShortcutInfo info = (ShortcutInfo) i;
if (packageName.equals(info.getPackageName())) {
infos.add(info);
}
}
}
return infos;
}
/**
* This is called from the code that adds shortcuts from the intent receiver. This
* doesn't have a Cursor, so any missing data (such as title and icon) is resolved
* from the intent and the package manager instead.
*/
public ShortcutInfo getShortcutInfo(PackageManager manager, Intent intent, Context context) {
return getShortcutInfo(manager, intent, context, null, -1, -1, null);
}
/**
* Make a ShortcutInfo object for a shortcut that is an application.
*
* If c is not null, then it will be used to fill in missing data like the title and icon.
*/
public ShortcutInfo getShortcutInfo(PackageManager manager, Intent intent, Context context,
Cursor c, int iconIndex, int titleIndex, HashMap<Object, CharSequence> labelCache) {
Bitmap icon = null;
final ShortcutInfo info = new ShortcutInfo();
ComponentName componentName = intent.getComponent();
if (componentName == null) {
return null;
}
try {
PackageInfo pi = manager.getPackageInfo(componentName.getPackageName(), 0);
if (!pi.applicationInfo.enabled) {
// If we return null here, the corresponding item will be removed from the launcher
// db and will not appear in the workspace.
return null;
}
} catch (NameNotFoundException e) {
Log.d(TAG, "getPackInfo failed for package " + componentName.getPackageName());
}
// TODO: See if the PackageManager knows about this case. If it doesn't
// then return null & delete this.
// the resource -- This may implicitly give us back the fallback icon,
// but don't worry about that. All we're doing with usingFallbackIcon is
// to avoid saving lots of copies of that in the database, and most apps
// have icons anyway.
// Attempt to use queryIntentActivities to get the ResolveInfo (with IntentFilter info) and
// if that fails, or is ambiguous, fall back to the standard way of getting the resolve info
// via resolveActivity().
ResolveInfo resolveInfo = null;
ComponentName oldComponent = intent.getComponent();
Intent newIntent = new Intent(intent.getAction(), null);
newIntent.addCategory(Intent.CATEGORY_LAUNCHER);
newIntent.setPackage(oldComponent.getPackageName());
List<ResolveInfo> infos = manager.queryIntentActivities(newIntent, 0);
for (ResolveInfo i : infos) {
ComponentName cn = new ComponentName(i.activityInfo.packageName,
i.activityInfo.name);
if (cn.equals(oldComponent)) {
resolveInfo = i;
}
}
if (resolveInfo == null) {
resolveInfo = manager.resolveActivity(intent, 0);
}
if (resolveInfo != null) {
icon = mIconCache.getIcon(componentName, resolveInfo, labelCache);
}
// the db
if (icon == null) {
if (c != null) {
icon = getIconFromCursor(c, iconIndex, context);
}
}
// the fallback icon
if (icon == null) {
icon = getFallbackIcon();
info.usingFallbackIcon = true;
}
info.setIcon(icon);
// from the resource
if (resolveInfo != null) {
ComponentName key = LauncherModel.getComponentNameFromResolveInfo(resolveInfo);
if (labelCache != null && labelCache.containsKey(key)) {
info.title = labelCache.get(key);
} else {
info.title = resolveInfo.activityInfo.loadLabel(manager);
if (labelCache != null) {
labelCache.put(key, info.title);
}
}
}
// from the db
if (info.title == null) {
if (c != null) {
info.title = c.getString(titleIndex);
}
}
// fall back to the class name of the activity
if (info.title == null) {
info.title = componentName.getClassName();
}
info.itemType = LauncherSettings.Favorites.ITEM_TYPE_APPLICATION;
return info;
}
/**
* Make a ShortcutInfo object for a shortcut that isn't an application.
*/
private ShortcutInfo getShortcutInfo(Cursor c, Context context,
int iconTypeIndex, int iconPackageIndex, int iconResourceIndex, int iconIndex,
int titleIndex) {
Bitmap icon = null;
final ShortcutInfo info = new ShortcutInfo();
info.itemType = LauncherSettings.Favorites.ITEM_TYPE_SHORTCUT;
// TODO: If there's an explicit component and we can't install that, delete it.
info.title = c.getString(titleIndex);
int iconType = c.getInt(iconTypeIndex);
switch (iconType) {
case LauncherSettings.Favorites.ICON_TYPE_RESOURCE:
String packageName = c.getString(iconPackageIndex);
String resourceName = c.getString(iconResourceIndex);
PackageManager packageManager = context.getPackageManager();
info.customIcon = false;
// the resource
try {
Resources resources = packageManager.getResourcesForApplication(packageName);
if (resources != null) {
final int id = resources.getIdentifier(resourceName, null, null);
icon = Utilities.createIconBitmap(
mIconCache.getFullResIcon(resources, id), context);
}
} catch (Exception e) {
// drop this. we have other places to look for icons
}
// the db
if (icon == null) {
icon = getIconFromCursor(c, iconIndex, context);
}
// the fallback icon
if (icon == null) {
icon = getFallbackIcon();
info.usingFallbackIcon = true;
}
break;
case LauncherSettings.Favorites.ICON_TYPE_BITMAP:
icon = getIconFromCursor(c, iconIndex, context);
if (icon == null) {
icon = getFallbackIcon();
info.customIcon = false;
info.usingFallbackIcon = true;
} else {
info.customIcon = true;
}
break;
default:
icon = getFallbackIcon();
info.usingFallbackIcon = true;
info.customIcon = false;
break;
}
info.setIcon(icon);
return info;
}
Bitmap getIconFromCursor(Cursor c, int iconIndex, Context context) {
@SuppressWarnings("all") // suppress dead code warning
final boolean debug = false;
if (debug) {
Log.d(TAG, "getIconFromCursor app="
+ c.getString(c.getColumnIndexOrThrow(LauncherSettings.Favorites.TITLE)));
}
byte[] data = c.getBlob(iconIndex);
try {
return Utilities.createIconBitmap(
BitmapFactory.decodeByteArray(data, 0, data.length), context);
} catch (Exception e) {
return null;
}
}
ShortcutInfo addShortcut(Context context, Intent data, long container, int screen,
int cellX, int cellY, boolean notify) {
final ShortcutInfo info = infoFromShortcutIntent(context, data, null);
if (info == null) {
return null;
}
addItemToDatabase(context, info, container, screen, cellX, cellY, notify);
return info;
}
/**
* Attempts to find an AppWidgetProviderInfo that matches the given component.
*/
AppWidgetProviderInfo findAppWidgetProviderInfoWithComponent(Context context,
ComponentName component) {
List<AppWidgetProviderInfo> widgets =
AppWidgetManager.getInstance(context).getInstalledProviders();
for (AppWidgetProviderInfo info : widgets) {
if (info.provider.equals(component)) {
return info;
}
}
return null;
}
/**
* Returns a list of all the widgets that can handle configuration with a particular mimeType.
*/
List<WidgetMimeTypeHandlerData> resolveWidgetsForMimeType(Context context, String mimeType) {
final PackageManager packageManager = context.getPackageManager();
final List<WidgetMimeTypeHandlerData> supportedConfigurationActivities =
new ArrayList<WidgetMimeTypeHandlerData>();
final Intent supportsIntent =
new Intent(InstallWidgetReceiver.ACTION_SUPPORTS_CLIPDATA_MIMETYPE);
supportsIntent.setType(mimeType);
// Create a set of widget configuration components that we can test against
final List<AppWidgetProviderInfo> widgets =
AppWidgetManager.getInstance(context).getInstalledProviders();
final HashMap<ComponentName, AppWidgetProviderInfo> configurationComponentToWidget =
new HashMap<ComponentName, AppWidgetProviderInfo>();
for (AppWidgetProviderInfo info : widgets) {
configurationComponentToWidget.put(info.configure, info);
}
// Run through each of the intents that can handle this type of clip data, and cross
// reference them with the components that are actual configuration components
final List<ResolveInfo> activities = packageManager.queryIntentActivities(supportsIntent,
PackageManager.MATCH_DEFAULT_ONLY);
for (ResolveInfo info : activities) {
final ActivityInfo activityInfo = info.activityInfo;
final ComponentName infoComponent = new ComponentName(activityInfo.packageName,
activityInfo.name);
if (configurationComponentToWidget.containsKey(infoComponent)) {
supportedConfigurationActivities.add(
new InstallWidgetReceiver.WidgetMimeTypeHandlerData(info,
configurationComponentToWidget.get(infoComponent)));
}
}
return supportedConfigurationActivities;
}
ShortcutInfo infoFromShortcutIntent(Context context, Intent data, Bitmap fallbackIcon) {
Intent intent = data.getParcelableExtra(Intent.EXTRA_SHORTCUT_INTENT);
String name = data.getStringExtra(Intent.EXTRA_SHORTCUT_NAME);
Parcelable bitmap = data.getParcelableExtra(Intent.EXTRA_SHORTCUT_ICON);
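// (Context: data is typically the result of an ACTION_CREATE_SHORTCUT
// request, carrying EXTRA_SHORTCUT_INTENT, EXTRA_SHORTCUT_NAME and either
// a Bitmap in EXTRA_SHORTCUT_ICON or a ShortcutIconResource in
// EXTRA_SHORTCUT_ICON_RESOURCE.)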
if (intent == null) {
// If the intent is null, we can't construct a valid ShortcutInfo, so we return null
Log.e(TAG, "Can't construct ShorcutInfo with null intent");
return null;
}
Bitmap icon = null;
boolean customIcon = false;
ShortcutIconResource iconResource = null;
if (bitmap != null && bitmap instanceof Bitmap) {
icon = Utilities.createIconBitmap(new FastBitmapDrawable((Bitmap)bitmap), context);
customIcon = true;
} else {
Parcelable extra = data.getParcelableExtra(Intent.EXTRA_SHORTCUT_ICON_RESOURCE);
if (extra != null && extra instanceof ShortcutIconResource) {
try {
iconResource = (ShortcutIconResource) extra;
final PackageManager packageManager = context.getPackageManager();
Resources resources = packageManager.getResourcesForApplication(
iconResource.packageName);
final int id = resources.getIdentifier(iconResource.resourceName, null, null);
icon = Utilities.createIconBitmap(
mIconCache.getFullResIcon(resources, id), context);
} catch (Exception e) {
Log.w(TAG, "Could not load shortcut icon: " + extra);
}
}
}
final ShortcutInfo info = new ShortcutInfo();
if (icon == null) {
if (fallbackIcon != null) {
icon = fallbackIcon;
} else {
icon = getFallbackIcon();
info.usingFallbackIcon = true;
}
}
info.setIcon(icon);
info.title = name;
info.intent = intent;
info.customIcon = customIcon;
info.iconResource = iconResource;
return info;
}
boolean queueIconToBeChecked(HashMap<Object, byte[]> cache, ShortcutInfo info, Cursor c,
int iconIndex) {
// If apps can't be on SD, don't even bother.
if (!mAppsCanBeOnExternalStorage) {
return false;
}
// If this icon doesn't have a custom icon, check to see
// what's stored in the DB, and if it doesn't match what
// we're going to show, store what we are going to show back
// into the DB. We do this so when we're loading, if the
// package manager can't find an icon (for example because
// the app is on SD) then we can use that instead.
if (!info.customIcon && !info.usingFallbackIcon) {
cache.put(info, c.getBlob(iconIndex));
return true;
}
return false;
}
void updateSavedIcon(Context context, ShortcutInfo info, byte[] data) {
boolean needSave = false;
try {
if (data != null) {
Bitmap saved = BitmapFactory.decodeByteArray(data, 0, data.length);
Bitmap loaded = info.getIcon(mIconCache);
needSave = !saved.sameAs(loaded);
} else {
needSave = true;
}
} catch (Exception e) {
needSave = true;
}
if (needSave) {
Log.d(TAG, "going to save icon bitmap for info=" + info);
// This is slower than is ideal, but this only happens once
// or when the app is updated with a new icon.
updateItemInDatabase(context, info);
}
}
/**
* Return an existing FolderInfo object if we have encountered this ID previously,
* or make a new one.
*/
private static FolderInfo findOrMakeFolder(HashMap<Long, FolderInfo> folders, long id) {
// See if a placeholder was created for us already
FolderInfo folderInfo = folders.get(id);
if (folderInfo == null) {
// No placeholder -- create a new instance
folderInfo = new FolderInfo();
folders.put(id, folderInfo);
}
return folderInfo;
}
private static final Collator sCollator = Collator.getInstance();
public static final Comparator<ApplicationInfo> APP_NAME_COMPARATOR
= new Comparator<ApplicationInfo>() {
public final int compare(ApplicationInfo a, ApplicationInfo b) {
int result = sCollator.compare(a.title.toString(), b.title.toString());
if (result == 0) {
result = a.componentName.compareTo(b.componentName);
}
return result;
}
};
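// Note: the comparator below returns 1 when a.firstInstallTime is smaller,
// so it sorts by descending install time (most recently installed first).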
public static final Comparator<ApplicationInfo> APP_INSTALL_TIME_COMPARATOR
= new Comparator<ApplicationInfo>() {
public final int compare(ApplicationInfo a, ApplicationInfo b) {
if (a.firstInstallTime < b.firstInstallTime) return 1;
if (a.firstInstallTime > b.firstInstallTime) return -1;
return 0;
}
};
public static final Comparator<AppWidgetProviderInfo> WIDGET_NAME_COMPARATOR
= new Comparator<AppWidgetProviderInfo>() {
public final int compare(AppWidgetProviderInfo a, AppWidgetProviderInfo b) {
return sCollator.compare(a.label.toString(), b.label.toString());
}
};
static ComponentName getComponentNameFromResolveInfo(ResolveInfo info) {
if (info.activityInfo != null) {
return new ComponentName(info.activityInfo.packageName, info.activityInfo.name);
} else {
return new ComponentName(info.serviceInfo.packageName, info.serviceInfo.name);
}
}
public static class ShortcutNameComparator implements Comparator<ResolveInfo> {
private PackageManager mPackageManager;
private HashMap<Object, CharSequence> mLabelCache;
ShortcutNameComparator(PackageManager pm) {
mPackageManager = pm;
mLabelCache = new HashMap<Object, CharSequence>();
}
ShortcutNameComparator(PackageManager pm, HashMap<Object, CharSequence> labelCache) {
mPackageManager = pm;
mLabelCache = labelCache;
}
public final int compare(ResolveInfo a, ResolveInfo b) {
CharSequence labelA, labelB;
ComponentName keyA = LauncherModel.getComponentNameFromResolveInfo(a);
ComponentName keyB = LauncherModel.getComponentNameFromResolveInfo(b);
if (mLabelCache.containsKey(keyA)) {
labelA = mLabelCache.get(keyA);
} else {
labelA = a.loadLabel(mPackageManager).toString();
mLabelCache.put(keyA, labelA);
}
if (mLabelCache.containsKey(keyB)) {
labelB = mLabelCache.get(keyB);
} else {
labelB = b.loadLabel(mPackageManager).toString();
mLabelCache.put(keyB, labelB);
}
return sCollator.compare(labelA, labelB);
}
};
public static class WidgetAndShortcutNameComparator implements Comparator<Object> {
private PackageManager mPackageManager;
private HashMap<Object, String> mLabelCache;
WidgetAndShortcutNameComparator(PackageManager pm) {
mPackageManager = pm;
mLabelCache = new HashMap<Object, String>();
}
public final int compare(Object a, Object b) {
String labelA, labelB;
if (mLabelCache.containsKey(a)) {
labelA = mLabelCache.get(a);
} else {
labelA = (a instanceof AppWidgetProviderInfo) ?
((AppWidgetProviderInfo) a).label :
((ResolveInfo) a).loadLabel(mPackageManager).toString();
mLabelCache.put(a, labelA);
}
if (mLabelCache.containsKey(b)) {
labelB = mLabelCache.get(b);
} else {
labelB = (b instanceof AppWidgetProviderInfo) ?
((AppWidgetProviderInfo) b).label :
((ResolveInfo) b).loadLabel(mPackageManager).toString();
mLabelCache.put(b, labelB);
}
return sCollator.compare(labelA, labelB);
}
};
public void dumpState() {
Log.d(TAG, "mCallbacks=" + mCallbacks);
ApplicationInfo.dumpApplicationInfoList(TAG, "mAllAppsList.data", mAllAppsList.data);
ApplicationInfo.dumpApplicationInfoList(TAG, "mAllAppsList.added", mAllAppsList.added);
ApplicationInfo.dumpApplicationInfoList(TAG, "mAllAppsList.removed", mAllAppsList.removed);
ApplicationInfo.dumpApplicationInfoList(TAG, "mAllAppsList.modified", mAllAppsList.modified);
if (mLoaderTask != null) {
mLoaderTask.dumpState();
} else {
Log.d(TAG, "mLoaderTask=null");
}
}
}
|
diff --git a/demoiselle-ldap/src/main/java/br/gov/frameworkdemoiselle/ldap/template/LDAPCrud.java b/demoiselle-ldap/src/main/java/br/gov/frameworkdemoiselle/ldap/template/LDAPCrud.java
index a670882..906d8db 100644
--- a/demoiselle-ldap/src/main/java/br/gov/frameworkdemoiselle/ldap/template/LDAPCrud.java
+++ b/demoiselle-ldap/src/main/java/br/gov/frameworkdemoiselle/ldap/template/LDAPCrud.java
@@ -1,242 +1,242 @@
/*
* Demoiselle Framework
* Copyright (C) 2010 SERPRO
* Copyright (c) 2012 - Reinaldo de Carvalho <[email protected]>
* ----------------------------------------------------------------------------
* This file is part of Demoiselle Framework.
*
* Demoiselle Framework is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public License version 3
* as published by the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License version 3
* along with this program; if not, see <http://www.gnu.org/licenses/>
* or write to the Free Software Foundation, Inc., 51 Franklin Street,
* Fifth Floor, Boston, MA 02110-1301, USA.
* ----------------------------------------------------------------------------
* Este arquivo é parte do Framework Demoiselle.
*
* O Framework Demoiselle é um software livre; você pode redistribuí-lo e/ou
* modificá-lo dentro dos termos da GNU LGPL versão 3 como publicada pela Fundação
* do Software Livre (FSF).
*
* Este programa é distribuído na esperança que possa ser útil, mas SEM NENHUMA
* GARANTIA; sem uma garantia implícita de ADEQUAÇÃO a qualquer MERCADO ou
* APLICAÇÃO EM PARTICULAR. Veja a Licença Pública Geral GNU/LGPL em português
* para maiores detalhes.
*
* Você deve ter recebido uma cópia da GNU LGPL versão 3, sob o título
* "LICENCA.txt", junto com esse programa. Se não, acesse <http://www.gnu.org/licenses/>
* ou escreva para a Fundação do Software Livre (FSF) Inc.,
* 51 Franklin St, Fifth Floor, Boston, MA 02111-1301, USA.
*/
package br.gov.frameworkdemoiselle.ldap.template;
import java.util.List;
import java.util.Map;
import javax.enterprise.inject.Instance;
import javax.inject.Inject;
import br.gov.frameworkdemoiselle.enumeration.contrib.Logic;
import br.gov.frameworkdemoiselle.enumeration.contrib.Comparison;
import br.gov.frameworkdemoiselle.ldap.core.EntryManager;
import br.gov.frameworkdemoiselle.ldap.core.EntryQuery;
import br.gov.frameworkdemoiselle.query.contrib.QueryConfig;
import br.gov.frameworkdemoiselle.query.contrib.QueryContext;
import br.gov.frameworkdemoiselle.template.Crud;
import br.gov.frameworkdemoiselle.util.Reflections;
/**
* LDAP specific implementation for Crud interface.
*
* @param <T>
* bean object type
* @param <I>
* bean id type
* @author SERPRO
* @see Crud
*/
public class LDAPCrud<T, I> implements Crud<T, I> {
private static final long serialVersionUID = 1L;
@Inject
private EntryManager entryManager;
@Inject
private Instance<QueryContext> queryContext;
private QueryConfig<T> queryConfig;
private Class<T> beanClass;
protected Class<T> getBeanClass() {
if (this.beanClass == null) {
this.beanClass = Reflections.getGenericTypeArgument(this.getClass(), 0);
}
return this.beanClass;
}
protected EntryManager getEntryManager() {
return this.entryManager;
}
protected EntryQuery createQuery(final String ql) {
return getEntryManager().createQuery(ql);
}
protected QueryConfig<T> getQueryConfig() {
if (queryConfig == null) {
QueryContext context = queryContext.get();
queryConfig = context.getQueryConfig(getBeanClass());
}
return queryConfig;
}
public void insert(final T entity) {
getEntryManager().persist(entity);
}
public void delete(final I id) {
T entry = getEntryManager().getReference(getBeanClass(), id);
getEntryManager().remove(entry);
}
public void update(final T entity) {
getEntryManager().merge(entity);
}
public T load(final I id) {
return getEntryManager().find(getBeanClass(), id);
}
@SuppressWarnings("unchecked")
public List<T> findAll() {
String filter = "objectClass=" + getBeanClass().getSimpleName();
final QueryConfig<T> queryConfig = this.getQueryConfig();
if (queryConfig != null)
if (queryConfig.getFilter() != null && !queryConfig.getFilter().isEmpty())
filter = getFilter(getBeanClass(), queryConfig.getFilter(), queryConfig.getFilterLogic(), queryConfig.getFilterComparison());
EntryQuery query = getEntryManager().createQuery(filter);
if (queryConfig != null) {
// TODO: implement pagination with LDAP Asynchronous Query
// queryConfig.setTotalResults(countAll(queryConfig));
if (queryConfig.getMaxResults() > 0) {
// query.setFirstResult(queryConfig.getFirstResult());
- query.setBaseDN((String) queryConfig.getGeneric());
query.setMaxResults(queryConfig.getMaxResults());
}
+ query.setBaseDN((String) queryConfig.getGeneric());
}
return query.getResultList();
}
/**
* Create a filter like
* "(&(objectClass=className)(attribute1=value1)(attribute2=value2))" or
* "(&(objectClass=className)(|(attribute1=value1)(attribute2=value2)))"
*
* @param clazz
* a class whose simple name is used as the objectClass
* @param map
* a map with attribute names as keys and attribute values as
* Objects. A value can be null for "(attribute=*)", a
* .toString()-capable object such as Integer or Long for
* "(attribute=value)", or an array, which yields
* "(attribute=value1)(attribute=value2)"
* @param logic
* if AND or OR, each element has the form "(attribute=value)";
* if NAND or NOR, each element is negated as "(!(attribute=value))"
* @param notation
* if INFIX the result is "(attribute=*value*)", if PREFIX
* "(attribute=value*)", if POSTFIX "(attribute=*value)",
* otherwise EXACT gives "(attribute=value)"
* @return a filter like
* "(&(objectClass=className)(attribute1=value1)(attribute2=value2))"
* or
* "(&(objectClass=className)(|(attribute1=value1)(attribute2=value2)))"
*/
public static String getFilter(Class<?> clazz, Map<String, Object> map, Logic logic, Comparison notation) {
if (logic == Logic.AND || logic == Logic.NAND)
return "(&(objectClass=" + clazz.getSimpleName() + ")" + getPartialFilter(map, logic, notation) + ")";
else
return "(&(objectClass=" + clazz.getSimpleName() + ")(|" + getPartialFilter(map, logic, notation) + "))";
}
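// Illustrative usage (User and the attribute names are hypothetical; a
// LinkedHashMap keeps the element order deterministic):
// Map<String, Object> map = new LinkedHashMap<String, Object>();
// map.put("uid", "jdoe");
// map.put("mail", null);
// getFilter(User.class, map, Logic.AND, Comparison.EQUALS)
// yields "(&(objectClass=User)(uid=jdoe)(mail=*))"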
/**
* Create a partial filter like "(attribute1=value1)(attribute2=value2)" or
* "(!(attribute1=value1))(!(attribute2=value2))".
*
* @param map
* a map with attribute names as keys and attribute values as
* Objects. A value can be null for "(attribute=*)", a
* .toString()-capable object such as Integer or Long for
* "(attribute=value)", or an array, which yields
* "(attribute=value1)(attribute=value2)"
* @param logic
* if AND or OR, each element has the form "(attribute=value)";
* if NAND or NOR, each element is negated as "(!(attribute=value))"
* @param notation
* if INFIX the result is "(attribute=*value*)", if PREFIX
* "(attribute=value*)", if POSTFIX "(attribute=*value)",
* otherwise EXACT gives "(attribute=value)"
* @return a partial filter like "(attribute1=value1)(attribute2=value2)" or
* "(!(attribute1=value1))(!(attribute2=value2))".
*/
public static String getPartialFilter(Map<String, Object> map, Logic logic, Comparison notation) {
String partialFilter = "";
for (Map.Entry<String, Object> mapEntry : map.entrySet())
if (mapEntry.getValue() == null || !mapEntry.getValue().getClass().isArray())
partialFilter = partialFilter + getPartialFilterElement(mapEntry.getKey(), mapEntry.getValue(), logic, notation);
else
for (Object value : (Object[]) mapEntry.getValue())
partialFilter = partialFilter + getPartialFilterElement(mapEntry.getKey(), value, logic, notation);
return partialFilter;
}
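// Illustrative: an array value expands into one element per entry, e.g.
// map.put("cn", new Object[] { "alice", "bob" }); with Logic.AND and
// Comparison.EQUALS yields "(cn=alice)(cn=bob)"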
/**
* Create a partial filter element like "(attribute=value)" or
* "(!(attribute=value))"
*
* @param attr
* attribute name
* @param value
* attribute value
* @param logic
* if AND or OR, the element has the form "(attribute=value)";
* if NAND or NOR, it is negated as "(!(attribute=value))"
* @param notation
* if INFIX the result is "(attribute=*value*)", if PREFIX
* "(attribute=value*)", if POSTFIX "(attribute=*value)",
* otherwise EXACT gives "(attribute=value)"
* @return a partial filter element like "(attribute=*value*)" or
* "(!(attribute=value))"
*/
public static String getPartialFilterElement(String attr, Object value, Logic logic, Comparison notation) {
String partialFilter;
if (value == null)
partialFilter = "(" + attr + "=*)";
else if (notation == Comparison.EQUALS)
partialFilter = "(" + attr + "=" + value + ")";
else if (notation == Comparison.CONTAINS)
partialFilter = "(" + attr + "=*" + value + "*)";
else if (notation == Comparison.STARTSWITH)
partialFilter = "(" + attr + "=" + value + "*)";
else
partialFilter = "(" + attr + "=*" + value + ")";
if (logic == Logic.NAND || logic == Logic.NOR)
partialFilter = "(!" + partialFilter + ")";
return partialFilter;
}
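// Illustrative: negated infix matching, e.g.
// getPartialFilterElement("uid", "doe", Logic.NAND, Comparison.CONTAINS)
// yields "(!(uid=*doe*))"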
}
| false | true | public List<T> findAll() {
String filter = "objectClass=" + getBeanClass().getSimpleName();
final QueryConfig<T> queryConfig = this.getQueryConfig();
if (queryConfig != null)
if (queryConfig.getFilter() != null && !queryConfig.getFilter().isEmpty())
filter = getFilter(getBeanClass(), queryConfig.getFilter(), queryConfig.getFilterLogic(), queryConfig.getFilterComparison());
EntryQuery query = getEntryManager().createQuery(filter);
if (queryConfig != null) {
// TODO: implement pagination with LDAP Asynchronous Query
// queryConfig.setTotalResults(countAll(queryConfig));
if (queryConfig.getMaxResults() > 0) {
// query.setFirstResult(queryConfig.getFirstResult());
query.setBaseDN((String) queryConfig.getGeneric());
query.setMaxResults(queryConfig.getMaxResults());
}
}
return query.getResultList();
}
| public List<T> findAll() {
String filter = "objectClass=" + getBeanClass().getSimpleName();
final QueryConfig<T> queryConfig = this.getQueryConfig();
if (queryConfig != null)
if (queryConfig.getFilter() != null && !queryConfig.getFilter().isEmpty())
filter = getFilter(getBeanClass(), queryConfig.getFilter(), queryConfig.getFilterLogic(), queryConfig.getFilterComparison());
EntryQuery query = getEntryManager().createQuery(filter);
if (queryConfig != null) {
// TODO: implement pagination with LDAP Asynchronous Query
// queryConfig.setTotalResults(countAll(queryConfig));
if (queryConfig.getMaxResults() > 0) {
// query.setFirstResult(queryConfig.getFirstResult());
query.setMaxResults(queryConfig.getMaxResults());
}
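// Apply the base DN for every query, not only when results are capped.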
query.setBaseDN((String) queryConfig.getGeneric());
}
return query.getResultList();
}
|
diff --git a/src/jramos/Main.java b/src/jramos/Main.java
index f5ecc87..a035d67 100644
--- a/src/jramos/Main.java
+++ b/src/jramos/Main.java
@@ -1,137 +1,137 @@
/*
* To change this template, choose Tools | Templates
* and open the template in the editor.
*/
package jramos;
import java.io.*;
import jramos.tiposDatos.*;
import jramos.capaIO.*;
import java.util.ArrayList;
/** This class is for testing the classes I am creating; it is not part of the official code */
/*class Main
{
public static void main(String arg[])
{ System.out.println("Se muestra una linea usando getenv()");
System.out.println(System.getProperty("user.home") + System.getProperty("file.separator") + "archivoCursos.txt");
///Testing the Hora class
System.out.print("Enter a time using the 1st or 2nd format: ");
String linea = null;
Hora horaDePrueba = null;
try
{ BufferedReader entradaTeclado = new BufferedReader(new InputStreamReader(System.in));
linea = entradaTeclado.readLine();
}
catch(IOException IOE){ IOE.printStackTrace();}
try
{ horaDePrueba = new Hora(java.lang.Integer.valueOf(linea));
}
catch (NumberFormatException nfe)
{ try
{ horaDePrueba = new Hora(linea);
}
catch (HourOutOfRangeException hoor)
{ System.out.println("HourOutOfRangeException:" + hoor.getMessage());
System.exit(0);
}
}
catch (HourOutOfRangeException hoor)
{ System.out.println("HourOutOfRangeException:\n" + hoor.getMessage());
System.exit(0);
}
//horaDePrueba = new Hora(); //used to test the exception when creating an object without initializing the time.
try
{ System.out.println("La hora según 1° formato es : " + horaDePrueba.getHora());
System.out.println("La hora según 2° formato es: " + horaDePrueba.getHoraStr());
System.out.println("La hora según 3° formato es: " + horaDePrueba.getHoraStr2());
System.out.println("La hora según 4° formato es: " + horaDePrueba.getHoraStr3());
}
catch (HourNotInicializatedException hnie)
{ System.out.println("HourNotInicializatedException:\n" + hnie.getMessage());
System.exit(0);
}
}
}
*/
//This main is meant to test capaIOCursos
public class Main
{
public static void main(String args[])
{
//VentanaPrincipal window = new VentanaPrincipal();
//window.setVisible(true);
CapaIOCursos gestorIOCursos;
CapaIOProfes gestorIOProfes;
ArrayList<Curso> listaCursos;
ArrayList<Carrera> listaCarreras;
ArrayList<Semestre> listaSemestres;
ArrayList<Facultad> listaFacultades;
ArrayList<Profesor> listaProfesores;
try
{ gestorIOCursos = new CapaIOCursos();
gestorIOProfes = new CapaIOProfes();
listaCursos = gestorIOCursos.leeCursos();
listaCarreras = gestorIOCursos.leeCarreras();
listaSemestres = gestorIOCursos.leeSemestres();
listaFacultades = gestorIOCursos.leeFacultades();
listaProfesores = gestorIOProfes.leeProfes();
- //Referenciador.crearReferencias(listaCarreras, listaCursos, listaFacultades, listaProfesores, listaSemestres);
+ Referenciador.crearReferencias(listaCarreras, listaCursos, listaFacultades, listaProfesores, listaSemestres);
// Here they are written out
gestorIOCursos.escribeSemestres(listaSemestres);
gestorIOCursos.escribeCarreras(listaCarreras);
gestorIOCursos.escribeCursos(listaCursos);
gestorIOCursos.escribeFacultades(listaFacultades);
}
catch (Exception e)
{
System.out.println("ERROR");
}
}
}
//This main is for testing capaIOProfes
/*
public class Main
{
public static void main(String args[])
{ CapaIOProfes gestorIOProfes;
ArrayList<Profesor> lista;
try
{ gestorIOProfes = new CapaIOProfes();
lista = gestorIOProfes.leeProfes();
System.out.println(lista);
System.out.println("Ahora al revez, escribo una lista de profes en el archivo.");
gestorIOProfes.escribeProfes(lista, 7);
}
catch (Exception e)
{
System.out.println("ERROR");
}
}
}
*/
| true | true | public static void main(String args[])
{
//VentanaPrincipal window = new VentanaPrincipal();
//window.setVisible(true);
CapaIOCursos gestorIOCursos;
CapaIOProfes gestorIOProfes;
ArrayList<Curso> listaCursos;
ArrayList<Carrera> listaCarreras;
ArrayList<Semestre> listaSemestres;
ArrayList<Facultad> listaFacultades;
ArrayList<Profesor> listaProfesores;
try
{ gestorIOCursos = new CapaIOCursos();
gestorIOProfes = new CapaIOProfes();
listaCursos = gestorIOCursos.leeCursos();
listaCarreras = gestorIOCursos.leeCarreras();
listaSemestres = gestorIOCursos.leeSemestres();
listaFacultades = gestorIOCursos.leeFacultades();
listaProfesores = gestorIOProfes.leeProfes();
//Referenciador.crearReferencias(listaCarreras, listaCursos, listaFacultades, listaProfesores, listaSemestres);
// Here they are written out
gestorIOCursos.escribeSemestres(listaSemestres);
gestorIOCursos.escribeCarreras(listaCarreras);
gestorIOCursos.escribeCursos(listaCursos);
gestorIOCursos.escribeFacultades(listaFacultades);
}
catch (Exception e)
{
System.out.println("ERROR");
}
}
| public static void main(String args[])
{
//VentanaPrincipal window = new VentanaPrincipal();
//window.setVisible(true);
CapaIOCursos gestorIOCursos;
CapaIOProfes gestorIOProfes;
ArrayList<Curso> listaCursos;
ArrayList<Carrera> listaCarreras;
ArrayList<Semestre> listaSemestres;
ArrayList<Facultad> listaFacultades;
ArrayList<Profesor> listaProfesores;
try
{ gestorIOCursos = new CapaIOCursos();
gestorIOProfes = new CapaIOProfes();
listaCursos = gestorIOCursos.leeCursos();
listaCarreras = gestorIOCursos.leeCarreras();
listaSemestres = gestorIOCursos.leeSemestres();
listaFacultades = gestorIOCursos.leeFacultades();
listaProfesores = gestorIOProfes.leeProfes();
Referenciador.crearReferencias(listaCarreras, listaCursos, listaFacultades, listaProfesores, listaSemestres);
// Here they are written out
gestorIOCursos.escribeSemestres(listaSemestres);
gestorIOCursos.escribeCarreras(listaCarreras);
gestorIOCursos.escribeCursos(listaCursos);
gestorIOCursos.escribeFacultades(listaFacultades);
}
catch (Exception e)
{
System.out.println("ERROR");
}
}
|
diff --git a/src/edu/usf/cutr/siri/android/util/SiriJacksonConfig.java b/src/edu/usf/cutr/siri/android/util/SiriJacksonConfig.java
index 1ef091d..7c226f1 100644
--- a/src/edu/usf/cutr/siri/android/util/SiriJacksonConfig.java
+++ b/src/edu/usf/cutr/siri/android/util/SiriJacksonConfig.java
@@ -1,161 +1,161 @@
package edu.usf.cutr.siri.android.util;
import uk.org.siri.siri.Siri;
import com.fasterxml.aalto.stax.InputFactoryImpl;
import com.fasterxml.aalto.stax.OutputFactoryImpl;
import com.fasterxml.jackson.databind.DeserializationFeature;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.ObjectReader;
import com.fasterxml.jackson.dataformat.xml.JacksonXmlModule;
import com.fasterxml.jackson.dataformat.xml.XmlFactory;
import com.fasterxml.jackson.dataformat.xml.XmlMapper;
import edu.usf.cutr.siri.jackson.PascalCaseStrategy;
/**
* This class holds a static instance of a Jackson ObjectMapper that is
* configured for parsing Siri JSON responses
*
* The ObjectMapper is thread-safe after it is configured:
* http://wiki.fasterxml.com/JacksonFAQThreadSafety
*
* ...so we can configure it once here and then use it in multiple fragments.
*
* @author Sean J. Barbeau
*
*/
public class SiriJacksonConfig {
//For JSON
private static ObjectMapper mapper = null;
private static ObjectReader reader = null;
//For XML
private static XmlMapper xmlMapper = null;
/**
* Constructs a thread-safe instance of a Jackson ObjectMapper configured to parse
* JSON responses from a Mobile Siri API.
*
* According to Jackson Best Practices (http://wiki.fasterxml.com/JacksonBestPracticesPerformance),
* for efficiency reasons you should use the ObjectReader instead of the ObjectMapper.
*
* @deprecated
* @return thread-safe ObjectMapper configured for SIRI JSON responses
*/
public synchronized static ObjectMapper getObjectMapperInstance() {
return initObjectMapper();
}
/**
* Constructs a thread-safe instance of a Jackson ObjectReader configured to parse
* JSON responses from a Mobile Siri API
*
* According to Jackson Best Practices (http://wiki.fasterxml.com/JacksonBestPracticesPerformance),
* this should be more efficient than the ObjectMapper.
*
* @return thread-safe ObjectReader configured for SIRI JSON responses
*/
public synchronized static ObjectReader getObjectReaderInstance() {
if(reader == null){
reader = initObjectMapper().reader(Siri.class);
}
return reader;
}
/**
* Internal method used to init main ObjectMapper for JSON parsing
* @return initialized ObjectMapper ready for JSON parsing
*/
private static ObjectMapper initObjectMapper(){
if (mapper == null) {
// Jackson configuration
mapper = new ObjectMapper();
mapper.configure(DeserializationFeature.UNWRAP_ROOT_VALUE, true);
mapper.configure(
DeserializationFeature.ACCEPT_SINGLE_VALUE_AS_ARRAY, true);
mapper.configure(
DeserializationFeature.ACCEPT_EMPTY_STRING_AS_NULL_OBJECT,
true);
mapper.configure(
DeserializationFeature.USE_JAVA_ARRAY_FOR_JSON_ARRAY, true);
mapper.configure(DeserializationFeature.READ_ENUMS_USING_TO_STRING,
true);
// Tell Jackson to expect the JSON in PascalCase, instead of
// camelCase
mapper.setPropertyNamingStrategy(new PascalCaseStrategy());
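// (Illustrative, assuming PascalCaseStrategy simply upper-cases the first
// letter: the bean property "serviceDelivery" is matched against the JSON
// key "ServiceDelivery".)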
}
return mapper;
}
/**
* Constructs a thread-safe instance of a Jackson XmlMapper configured to parse
* XML responses from a Mobile Siri API.
*
* @return thread-safe ObjectMapper configured for SIRI XML responses
*/
public synchronized static ObjectMapper getXmlMapperInstance() {
return initXmlMapper();
}
/**
* Internal method used to init main XmlMapper for XML parsing
* @return initialized XmlMapper ready for XML parsing
*/
private static XmlMapper initXmlMapper(){
if(xmlMapper == null){
// Use Aalto StAX implementation explicitly
XmlFactory f = new XmlFactory(new InputFactoryImpl(),
new OutputFactoryImpl());
JacksonXmlModule module = new JacksonXmlModule();
/*
* Tell Jackson that Lists are using "unwrapped" style (i.e.,
* there is no wrapper element for list). This fixes the error
* "com.fasterxml.jackson.databind.JsonMappingException: Can not
* >> instantiate value of type [simple type, class >>
* uk.org.siri.siri.VehicleMonitoringDelivery] from JSON String;
* no >> single-String constructor/factory method (through
* reference chain: >>
* uk.org.siri.siri.Siri["ServiceDelivery"]->
* uk.org.siri.siri.ServiceDel >>
* ivery["VehicleMonitoringDelivery"])"
*
* NOTE - This requires Jackson 2.1, which is still pre-release
* as of 9/12/2012
*/
module.setDefaultUseWrapper(false);
- XmlMapper xmlMapper = new XmlMapper(f, module);
+ xmlMapper = new XmlMapper(f, module);
xmlMapper.configure(
DeserializationFeature.ACCEPT_SINGLE_VALUE_AS_ARRAY,
true);
xmlMapper
.configure(
DeserializationFeature.ACCEPT_EMPTY_STRING_AS_NULL_OBJECT,
true);
xmlMapper.configure(
DeserializationFeature.USE_JAVA_ARRAY_FOR_JSON_ARRAY,
true);
xmlMapper
.configure(
DeserializationFeature.READ_ENUMS_USING_TO_STRING,
true);
// Tell Jackson to expect the XML in PascalCase, instead of
// camelCase
- xmlMapper.setPropertyNamingStrategy(new PascalCaseStrategy());
+ xmlMapper.setPropertyNamingStrategy(new PascalCaseStrategy());
}
return xmlMapper;
}
}
| false | true | private static XmlMapper initXmlMapper(){
if(xmlMapper == null){
// Use Aalto StAX implementation explicitly
XmlFactory f = new XmlFactory(new InputFactoryImpl(),
new OutputFactoryImpl());
JacksonXmlModule module = new JacksonXmlModule();
/*
* Tell Jackson that Lists are using "unwrapped" style (i.e.,
* there is no wrapper element for list). This fixes the error
* "com.fasterxml.jackson.databind.JsonMappingException: Can not
* >> instantiate value of type [simple type, class >>
* uk.org.siri.siri.VehicleMonitoringDelivery] from JSON String;
* no >> single-String constructor/factory method (through
* reference chain: >>
* uk.org.siri.siri.Siri["ServiceDelivery"]->
* uk.org.siri.siri.ServiceDel >>
* ivery["VehicleMonitoringDelivery"])"
*
* NOTE - This requires Jackson 2.1, which is still pre-release
* as of 9/12/2012
*/
module.setDefaultUseWrapper(false);
XmlMapper xmlMapper = new XmlMapper(f, module);
xmlMapper.configure(
DeserializationFeature.ACCEPT_SINGLE_VALUE_AS_ARRAY,
true);
xmlMapper
.configure(
DeserializationFeature.ACCEPT_EMPTY_STRING_AS_NULL_OBJECT,
true);
xmlMapper.configure(
DeserializationFeature.USE_JAVA_ARRAY_FOR_JSON_ARRAY,
true);
xmlMapper
.configure(
DeserializationFeature.READ_ENUMS_USING_TO_STRING,
true);
// Tell Jackson to expect the XML in PascalCase, instead of
// camelCase
xmlMapper.setPropertyNamingStrategy(new PascalCaseStrategy());
}
return xmlMapper;
}
| private static XmlMapper initXmlMapper(){
if(xmlMapper == null){
// Use Aalto StAX implementation explicitly
XmlFactory f = new XmlFactory(new InputFactoryImpl(),
new OutputFactoryImpl());
JacksonXmlModule module = new JacksonXmlModule();
/*
* Tell Jackson that Lists are using "unwrapped" style (i.e.,
* there is no wrapper element for list). This fixes the error
* "com.fasterxml.jackson.databind.JsonMappingException: Can not
* >> instantiate value of type [simple type, class >>
* uk.org.siri.siri.VehicleMonitoringDelivery] from JSON String;
* no >> single-String constructor/factory method (through
* reference chain: >>
* uk.org.siri.siri.Siri["ServiceDelivery"]->
* uk.org.siri.siri.ServiceDel >>
* ivery["VehicleMonitoringDelivery"])"
*
* NOTE - This requires Jackson 2.1, which is still pre-release
* as of 9/12/2012
*/
module.setDefaultUseWrapper(false);
xmlMapper = new XmlMapper(f, module);
xmlMapper.configure(
DeserializationFeature.ACCEPT_SINGLE_VALUE_AS_ARRAY,
true);
xmlMapper
.configure(
DeserializationFeature.ACCEPT_EMPTY_STRING_AS_NULL_OBJECT,
true);
xmlMapper.configure(
DeserializationFeature.USE_JAVA_ARRAY_FOR_JSON_ARRAY,
true);
xmlMapper
.configure(
DeserializationFeature.READ_ENUMS_USING_TO_STRING,
true);
// Tell Jackson to expect the XML in PascalCase, instead of
// camelCase
xmlMapper.setPropertyNamingStrategy(new PascalCaseStrategy());
}
return xmlMapper;
}
|
diff --git a/src/main/java/be/Balor/Player/sql/SQLPlayer.java b/src/main/java/be/Balor/Player/sql/SQLPlayer.java
index 41d805e7..0016f96c 100644
--- a/src/main/java/be/Balor/Player/sql/SQLPlayer.java
+++ b/src/main/java/be/Balor/Player/sql/SQLPlayer.java
@@ -1,763 +1,763 @@
/*This file is part of AdminCmd.
AdminCmd is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
AdminCmd is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with AdminCmd. If not, see <http://www.gnu.org/licenses/>.*/
package be.Balor.Player.sql;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Types;
import java.util.Collections;
import java.util.EnumMap;
import java.util.HashMap;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import java.util.TreeMap;
import lib.SQL.PatPeter.SQLibrary.Database;
import org.bukkit.Bukkit;
import org.bukkit.Location;
import org.bukkit.World;
import org.bukkit.entity.Player;
import be.Balor.Player.ACPlayer;
import be.Balor.Tools.Type;
import be.Balor.Tools.Utils;
import be.Balor.Tools.Debug.ACLogger;
import be.Balor.Tools.Debug.DebugLog;
import be.Balor.Tools.Files.ObjectContainer;
import be.Balor.Tools.Help.String.Str;
import be.Balor.Tools.Threads.PrepStmtExecutorTask;
import be.Balor.World.ACWorld;
import be.Balor.bukkit.AdminCmd.ACHelper;
import be.Balor.bukkit.AdminCmd.ACPluginManager;
import be.Balor.bukkit.AdminCmd.ConfigEnum;
/**
* @author Balor (aka Antoine Aflalo)
*
*/
public class SQLPlayer extends ACPlayer {
private final Map<String, Location> homes = Collections
.synchronizedMap(new HashMap<String, Location>());
private final Map<String, Object> infos = Collections
.synchronizedMap(new HashMap<String, Object>());
private final Map<Type, Object> powers = Collections
.synchronizedMap(new EnumMap<Type, Object>(Type.class));
private final Map<String, Object> customPowers = Collections
.synchronizedMap(new HashMap<String, Object>());
private final Map<String, Long> kitUses = Collections
.synchronizedMap(new HashMap<String, Long>());
private Location lastLoc;
private final long id;
private static PreparedStatement GET_HOMES, GET_INFOS, GET_POWERS,
GET_KIT_USES, GET_LASTLOC;
private static int prepStmtTaskID;
private static final PrepStmtExecutorTask PREP_STMT_TASK = new PrepStmtExecutorTask();
static {
initPrepStmt();
}
public static void initPrepStmt() {
GET_HOMES = Database.DATABASE
.prepare("SELECT `name`,`world`,`x`,`y`,`z`,`yaw`,`pitch` FROM `ac_homes` WHERE `player_id` = ?");
GET_POWERS = Database.DATABASE
.prepare("SELECT `key`,`info` FROM `ac_powers` WHERE `player_id` = ?");
GET_INFOS = Database.DATABASE
.prepare("SELECT `key`,`info` FROM `ac_informations` WHERE `player_id` = ?");
GET_KIT_USES = Database.DATABASE
.prepare("SELECT `kit`,`use` FROM `ac_kit_uses` WHERE `player_id` = ?");
GET_LASTLOC = Database.DATABASE
.prepare("SELECT `world`,`x`,`y`,`z`,`yaw`,`pitch` FROM ac_players WHERE id=?");
}
/**
* @param name
* @param id
*/
SQLPlayer(final String name, final long id) {
super(name);
this.id = id;
init();
}
SQLPlayer(final Player player, final long id) {
super(player);
this.id = id;
init();
}
private void init() {
synchronized (GET_LASTLOC) {
try {
GET_LASTLOC.clearParameters();
GET_LASTLOC.setLong(1, id);
ResultSet rs;
synchronized (GET_LASTLOC.getConnection()) {
rs = GET_LASTLOC.executeQuery();
}
if (rs.next()) {
final String worldName = rs.getString("world");
if (worldName != null && !worldName.isEmpty()) {
World world = Bukkit.getWorld(worldName);
if (world == null) {
- world = ACWorld.getWorld(world).getHandle();
+ world = ACWorld.getWorld(worldName).getHandle();
}
if (world != null) {
lastLoc = new Location(world, rs.getDouble("x"),
rs.getDouble("y"), rs.getDouble("z"),
rs.getFloat("yaw"), rs.getFloat("pitch"));
} else {
ACLogger.warning("The World " + worldName
+ " is not loaded");
}
}
}
} catch (final SQLException e) {
ACLogger.severe(
"Problem with getting last location from the DB", e);
}
}
synchronized (GET_HOMES) {
try {
GET_HOMES.clearParameters();
GET_HOMES.setLong(1, id);
ResultSet rs;
synchronized (GET_HOMES.getConnection()) {
rs = GET_HOMES.executeQuery();
}
while (rs.next()) {
final String worldName = rs.getString("world");
World world = Bukkit.getWorld(worldName);
if (world == null) {
world = ACWorld.getWorld(world).getHandle();
}
if (world != null) {
homes.put(
rs.getString("name"),
new Location(world, rs.getDouble("x"), rs
.getDouble("y"), rs.getDouble("z"), rs
.getFloat("yaw"), rs.getFloat("pitch")));
} else {
ACLogger.warning("The World " + worldName
+ " is not loaded");
}
}
rs.close();
} catch (final SQLException e) {
ACLogger.severe("Problem with getting homes from the DB", e);
}
}
synchronized (GET_POWERS) {
try {
GET_POWERS.clearParameters();
GET_POWERS.setLong(1, id);
ResultSet rs;
synchronized (GET_POWERS.getConnection()) {
rs = GET_POWERS.executeQuery();
}
while (rs.next()) {
final String powerName = rs.getString("key");
final Type power = Type.matchType(powerName);
synchronized (SQLObjectContainer.yaml) {
if (power == null) {
customPowers.put(powerName, SQLObjectContainer.yaml
.load(rs.getString("info")));
} else {
powers.put(power, SQLObjectContainer.yaml.load(rs
.getString("info")));
}
}
}
rs.close();
} catch (final SQLException e) {
ACLogger.severe("Problem with getting powers from the DB", e);
}
}
synchronized (GET_INFOS) {
try {
GET_INFOS.clearParameters();
GET_INFOS.setLong(1, id);
ResultSet rs;
synchronized (GET_INFOS.getConnection()) {
rs = GET_INFOS.executeQuery();
}
while (rs.next()) {
synchronized (SQLObjectContainer.yaml) {
infos.put(rs.getString("key"), SQLObjectContainer.yaml
.load(rs.getString("info")));
}
}
rs.close();
} catch (final SQLException e) {
ACLogger.severe(
"Problem with getting informations from the DB", e);
}
}
synchronized (GET_KIT_USES) {
try {
GET_KIT_USES.clearParameters();
GET_KIT_USES.setLong(1, id);
ResultSet rs;
synchronized (GET_KIT_USES.getConnection()) {
rs = GET_KIT_USES.executeQuery();
}
while (rs.next()) {
kitUses.put(rs.getString("kit"), rs.getLong("use"));
}
rs.close();
} catch (final SQLException e) {
ACLogger.severe("Problem with getting kit uses from the DB", e);
}
}
}
/**
* To be sure that all waiting prepStmt will be executed when this is called
*/
public static void forceExecuteStmts() {
PREP_STMT_TASK.run();
}
/**
* To Schedule the Async task
*/
public static void scheduleAsyncSave() {
if (ACPluginManager.getScheduler().isCurrentlyRunning(prepStmtTaskID)
|| ACPluginManager.getScheduler().isQueued(prepStmtTaskID)) {
return;
}
final int delay = ConfigEnum.E_PST_DELAY.getInt() >= 30 ? ConfigEnum.E_PST_DELAY
.getInt() : 10;
prepStmtTaskID = ACPluginManager
.getScheduler()
.runTaskTimerAsynchronously(
ACHelper.getInstance().getCoreInstance(),
PREP_STMT_TASK, Utils.secInTick * 2 * delay,
Utils.secInTick * delay).getTaskId();
DebugLog.INSTANCE.info("IO Save RepeatingTask created : "
+ prepStmtTaskID);
}
/**
* To stop the saving task.
*/
public static void stopSavingTask() {
if (!ACPluginManager.getScheduler().isCurrentlyRunning(prepStmtTaskID)
&& !ACPluginManager.getScheduler().isQueued(prepStmtTaskID)) {
return;
}
ACPluginManager.getScheduler().cancelTask(prepStmtTaskID);
}
/*
* (Non javadoc)
*
* @see be.Balor.Player.ACPlayer#setHome(java.lang.String,
* org.bukkit.Location)
*/
@Override
public void setHome(final String home, final Location loc) {
homes.put(home, loc);
try {
final PreparedStatement insertHome = Database.DATABASE
.prepare("REPLACE INTO `ac_homes` (`name`, `player_id`, `world`, `x`, `y`, `z`, `yaw`, `pitch`)"
+ " VALUES (?,?,?,?,?,?,?,?)");
insertHome.setString(1, home);
insertHome.setLong(2, id);
insertHome.setString(3, loc.getWorld().getName());
insertHome.setDouble(4, loc.getX());
insertHome.setDouble(5, loc.getY());
insertHome.setDouble(6, loc.getZ());
insertHome.setFloat(7, loc.getYaw());
insertHome.setFloat(8, loc.getPitch());
PREP_STMT_TASK.addPreparedStmt(insertHome);
} catch (final SQLException e) {
ACLogger.severe("Problem with inserting the home in the DB", e);
}
}
/*
* (Non javadoc)
*
* @see be.Balor.Player.ACPlayer#removeHome(java.lang.String)
*/
@Override
public void removeHome(final String home) {
if (homes.remove(home) != null) {
final PreparedStatement deleteHome = Database.DATABASE
.prepare("delete FROM `ac_homes` WHERE `player_id`=? AND `name`=?");
try {
deleteHome.setLong(1, id);
deleteHome.setString(2, home);
PREP_STMT_TASK.addPreparedStmt(deleteHome);
} catch (final SQLException e) {
ACLogger.severe("Problem with deleting the home from the DB", e);
}
}
}
/*
* (Non javadoc)
*
* @see be.Balor.Player.ACPlayer#getHome(java.lang.String)
*/
@Override
public Location getHome(final String home) {
Location loc = homes.get(home);
if (loc == null) {
final String homeName = Str.matchString(getHomeList(), name);
if (homeName == null) {
return null;
}
loc = homes.get(homeName);
}
return loc;
}
/*
* (Non javadoc)
*
* @see be.Balor.Player.ACPlayer#getHomeList()
*/
@Override
public Set<String> getHomeList() {
return Collections.unmodifiableSet(homes.keySet());
}
/*
* (Non javadoc)
*
* @see be.Balor.Player.ACPlayer#setInformation(java.lang.String,
* java.lang.Object)
*/
@Override
public void setInformation(final String info, final Object value) {
if (value == null) {
return;
}
infos.put(info, value);
final PreparedStatement insertInfo = Database.DATABASE
.prepare("REPLACE INTO `ac_informations` (`key` ,`player_id` ,`info`) VALUES (?, ?, ?)");
try {
insertInfo.setString(1, info);
insertInfo.setLong(2, id);
insertInfo.setString(3, value.toString());
PREP_STMT_TASK.addPreparedStmt(insertInfo);
} catch (final SQLException e) {
ACLogger.severe("Problem with insert info in the DB", e);
}
}
/*
* (Non javadoc)
*
* @see be.Balor.Player.ACPlayer#removeInformation(java.lang.String)
*/
@Override
public void removeInformation(final String info) {
if (infos.remove(info) != null) {
final PreparedStatement deleteInfo = Database.DATABASE
.prepare("delete FROM `ac_informations` WHERE `player_id`=? AND `key`=?");
try {
deleteInfo.setLong(1, id);
deleteInfo.setString(2, info);
PREP_STMT_TASK.addPreparedStmt(deleteInfo);
} catch (final SQLException e) {
ACLogger.severe("Problem with deleting the info from the DB", e);
}
}
}
/*
* (Non javadoc)
*
* @see be.Balor.Player.ACPlayer#getInformation(java.lang.String)
*/
@Override
public ObjectContainer getInformation(final String info) {
return new SQLObjectContainer(infos.get(info));
}
/*
* (Non javadoc)
*
* @see be.Balor.Player.ACPlayer#getInformationsList()
*/
@Override
public Set<String> getInformationsList() {
return Collections.unmodifiableSet(infos.keySet());
}
/*
* (Non javadoc)
*
* @see be.Balor.Player.ACPlayer#setLastLocation(org.bukkit.Location)
*/
@Override
public void setLastLocation(final Location loc) {
lastLoc = loc;
final PreparedStatement updateLastLoc = Database.DATABASE
.prepare("UPDATE `ac_players` SET `world` = ?, `x` = ?, `y` = ?, `z` = ?, `yaw` = ?, `pitch` = ? WHERE `ac_players`.`id` = ?;");
try {
updateLastLoc.clearParameters();
if (loc != null) {
updateLastLoc.setString(1, loc.getWorld().getName());
updateLastLoc.setDouble(2, loc.getX());
updateLastLoc.setDouble(3, loc.getY());
updateLastLoc.setDouble(4, loc.getZ());
updateLastLoc.setFloat(5, loc.getYaw());
updateLastLoc.setFloat(6, loc.getPitch());
} else {
updateLastLoc.setNull(1, Types.VARCHAR);
updateLastLoc.setNull(2, Types.DOUBLE);
updateLastLoc.setNull(3, Types.DOUBLE);
updateLastLoc.setNull(4, Types.DOUBLE);
updateLastLoc.setNull(5, Types.FLOAT);
updateLastLoc.setNull(6, Types.FLOAT);
}
updateLastLoc.setLong(7, id);
PREP_STMT_TASK.addPreparedStmt(updateLastLoc);
} catch (final SQLException e) {
ACLogger.severe("Problem with updating lastLoc in the DB", e);
}
}
/*
* (Non javadoc)
*
* @see be.Balor.Player.ACPlayer#getLastLocation()
*/
@Override
public Location getLastLocation() {
return lastLoc;
}
/*
* (Non javadoc)
*
* @see be.Balor.Player.ACPlayer#setPower(be.Balor.Tools.Type,
* java.lang.Object)
*/
@Override
public void setPower(final Type power, final Object value) {
powers.put(power, value);
final PreparedStatement insertPower = Database.DATABASE
.prepare("REPLACE INTO `ac_powers` (`key`, `player_id`, `info`, `category`) VALUES (?, ?, ?, ?);");
try {
insertPower.setString(1, power.name());
insertPower.setLong(2, id);
if (power == Type.EGG) {
synchronized (SQLObjectContainer.yaml) {
insertPower.setString(3,
SQLObjectContainer.yaml.dump(value));
}
} else {
insertPower.setString(3, value.toString());
}
insertPower.setString(4, power.getCategory().name());
PREP_STMT_TASK.addPreparedStmt(insertPower);
} catch (final SQLException e) {
ACLogger.severe("Problem with inserting power in the DB", e);
}
}
/*
* (Non javadoc)
*
* @see be.Balor.Player.ACPlayer#setCustomPower(java.lang.String,
* java.lang.Object)
*/
@Override
public void setCustomPower(final String power, final Object value) {
customPowers.put(power, value);
final PreparedStatement insertPower = Database.DATABASE
.prepare("REPLACE INTO `ac_powers` (`key`, `player_id`, `info`, `category`) VALUES (?, ?, ?, ?);");
try {
insertPower.clearParameters();
insertPower.setString(1, power);
insertPower.setLong(2, id);
insertPower.setString(3, value.toString());
insertPower.setString(4, Type.Category.OTHER.name());
PREP_STMT_TASK.addPreparedStmt(insertPower);
} catch (final SQLException e) {
ACLogger.severe("Problem with inserting power in the DB", e);
}
}
/*
* (Non javadoc)
*
* @see be.Balor.Player.ACPlayer#getCustomPower(java.lang.String)
*/
@Override
public ObjectContainer getCustomPower(final String power) {
return new ObjectContainer(customPowers.get(power));
}
/*
* (Non javadoc)
*
* @see be.Balor.Player.ACPlayer#hasCustomPower(java.lang.String)
*/
@Override
public boolean hasCustomPower(final String power) {
return customPowers.containsKey(power);
}
/*
* (Non javadoc)
*
* @see be.Balor.Player.ACPlayer#removeCustomPower(java.lang.String)
*/
@Override
public void removeCustomPower(final String power) {
if (customPowers.remove(power) != null) {
final PreparedStatement deletePower = Database.DATABASE
.prepare("delete FROM `ac_powers` WHERE `player_id`=? AND `key`=?");
try {
deletePower.clearParameters();
deletePower.setLong(1, id);
deletePower.setString(2, power);
PREP_STMT_TASK.addPreparedStmt(deletePower);
} catch (final SQLException e) {
ACLogger.severe("Problem with deleting customPower in the DB",
e);
}
}
}
/*
* (Non javadoc)
*
* @see be.Balor.Player.ACPlayer#getPower(be.Balor.Tools.Type)
*/
@Override
public ObjectContainer getPower(final Type power) {
return new SQLObjectContainer(powers.get(power));
}
/*
* (Non javadoc)
*
* @see be.Balor.Player.ACPlayer#hasPower(be.Balor.Tools.Type)
*/
@Override
public boolean hasPower(final Type power) {
return powers.containsKey(power);
}
/*
* (Non javadoc)
*
* @see be.Balor.Player.ACPlayer#removePower(be.Balor.Tools.Type)
*/
@Override
public void removePower(final Type power) {
if (powers.remove(power) != null) {
final PreparedStatement deletePower = Database.DATABASE
.prepare("delete FROM `ac_powers` WHERE `player_id`=? AND `key`=?");
try {
deletePower.clearParameters();
deletePower.setLong(1, id);
deletePower.setString(2, power.name());
PREP_STMT_TASK.addPreparedStmt(deletePower);
} catch (final SQLException e) {
ACLogger.severe("Problem with deleting power from the DB", e);
}
}
}
/*
* (Non javadoc)
*
* @see be.Balor.Player.ACPlayer#removeAllSuperPower()
*/
@Override
public void removeAllSuperPower() {
boolean found = false;
for (final Type power : powers.keySet()) {
if (power.getCategory() != Type.Category.SUPER_POWER) {
continue;
}
powers.remove(power);
found = true;
if (power != Type.FLY) {
continue;
}
if (handler == null) {
continue;
}
handler.setFlying(false);
handler.setAllowFlight(false);
}
if (found) {
final PreparedStatement deleteSuperPowers = Database.DATABASE
.prepare("delete FROM `ac_powers` WHERE `player_id`=? AND `category`='"
+ Type.Category.SUPER_POWER.name() + "'");
try {
deleteSuperPowers.clearParameters();
deleteSuperPowers.setLong(1, id);
PREP_STMT_TASK.addPreparedStmt(deleteSuperPowers);
} catch (final SQLException e) {
ACLogger.severe(
"Problem with deleting super powers from the DB", e);
}
}
}
/*
* (Non javadoc)
*
* @see be.Balor.Player.ACPlayer#setLastKitUse(java.lang.String, long)
*/
@Override
public void setLastKitUse(final String kit, final long timestamp) {
kitUses.put(kit, timestamp);
final PreparedStatement insertKitUse = Database.DATABASE
.prepare("REPLACE INTO `ac_kit_uses` (`kit`, `player_id`, `use`) VALUES (?, ?, ?);");
try {
insertKitUse.clearParameters();
insertKitUse.setString(1, kit);
insertKitUse.setLong(2, id);
insertKitUse.setLong(3, timestamp);
PREP_STMT_TASK.addPreparedStmt(insertKitUse);
} catch (final SQLException e) {
ACLogger.severe("Problem with inserting kit_use in the DB", e);
}
}
/*
* (Non javadoc)
*
* @see be.Balor.Player.ACPlayer#getLastKitUse(java.lang.String)
*/
@Override
public long getLastKitUse(final String kit) {
final Long use = kitUses.get(kit);
if (use == null) {
return 0L;
}
return use.longValue();
}
/*
* (Non javadoc)
*
* @see be.Balor.Player.ACPlayer#getKitUseList()
*/
@Override
public Set<String> getKitUseList() {
return Collections.unmodifiableSet(kitUses.keySet());
}
/*
* (Non javadoc)
*
* @see be.Balor.Player.ACPlayer#forceSave()
*/
@Override
protected void forceSave() {
}
/*
* (Non javadoc)
*
* @see be.Balor.Player.ACPlayer#getPowers()
*/
@Override
public Map<String, String> getPowersString() {
final Map<String, String> result = new TreeMap<String, String>();
for (final Entry<Type, Object> entry : powers.entrySet()) {
result.put(entry.getKey().name(), entry.getValue().toString());
}
for (final Entry<String, Object> entry : customPowers.entrySet()) {
result.put(entry.getKey(), entry.getValue().toString());
}
return result;
}
/*
* (Non javadoc)
*
* @see be.Balor.Player.ACPlayer#setPresentation(java.lang.String)
*/
@Override
public void setPresentation(final String presentation) {
setInformation("presentation", presentation);
}
/*
* (Non javadoc)
*
* @see be.Balor.Player.ACPlayer#getPresentation()
*/
@Override
public String getPresentation() {
final ObjectContainer pres = getInformation("presentation");
if (pres.isNull()) {
return "";
}
return pres.getString();
}
/*
* (Non javadoc)
*
* @see be.Balor.Player.ACPlayer#getPowers()
*/
@Override
public Map<Type, Object> getPowers() {
return Collections.unmodifiableMap(powers);
}
/*
* (Non javadoc)
*
* @see be.Balor.Player.ACPlayer#getCustomPowers()
*/
@Override
public Map<String, Object> getCustomPowers() {
return Collections.unmodifiableMap(customPowers);
}
}
| true | true | private void init() {
synchronized (GET_LASTLOC) {
try {
GET_LASTLOC.clearParameters();
GET_LASTLOC.setLong(1, id);
ResultSet rs;
synchronized (GET_LASTLOC.getConnection()) {
rs = GET_LASTLOC.executeQuery();
}
if (rs.next()) {
final String worldName = rs.getString("world");
if (worldName != null && !worldName.isEmpty()) {
World world = Bukkit.getWorld(worldName);
if (world == null) {
world = ACWorld.getWorld(world).getHandle();
}
if (world != null) {
lastLoc = new Location(world, rs.getDouble("x"),
rs.getDouble("y"), rs.getDouble("z"),
rs.getFloat("yaw"), rs.getFloat("pitch"));
} else {
ACLogger.warning("The World " + worldName
+ " is not loaded");
}
}
}
} catch (final SQLException e) {
ACLogger.severe(
"Problem with getting last location from the DB", e);
}
}
synchronized (GET_HOMES) {
try {
GET_HOMES.clearParameters();
GET_HOMES.setLong(1, id);
ResultSet rs;
synchronized (GET_HOMES.getConnection()) {
rs = GET_HOMES.executeQuery();
}
while (rs.next()) {
final String worldName = rs.getString("world");
World world = Bukkit.getWorld(worldName);
if (world == null) {
world = ACWorld.getWorld(world).getHandle();
}
if (world != null) {
homes.put(
rs.getString("name"),
new Location(world, rs.getDouble("x"), rs
.getDouble("y"), rs.getDouble("z"), rs
.getFloat("yaw"), rs.getFloat("pitch")));
} else {
ACLogger.warning("The World " + worldName
+ " is not loaded");
}
}
rs.close();
} catch (final SQLException e) {
ACLogger.severe("Problem with getting homes from the DB", e);
}
}
synchronized (GET_POWERS) {
try {
GET_POWERS.clearParameters();
GET_POWERS.setLong(1, id);
ResultSet rs;
synchronized (GET_POWERS.getConnection()) {
rs = GET_POWERS.executeQuery();
}
while (rs.next()) {
final String powerName = rs.getString("key");
final Type power = Type.matchType(powerName);
synchronized (SQLObjectContainer.yaml) {
if (power == null) {
customPowers.put(powerName, SQLObjectContainer.yaml
.load(rs.getString("info")));
} else {
powers.put(power, SQLObjectContainer.yaml.load(rs
.getString("info")));
}
}
}
rs.close();
} catch (final SQLException e) {
ACLogger.severe("Problem with getting powers from the DB", e);
}
}
synchronized (GET_INFOS) {
try {
GET_INFOS.clearParameters();
GET_INFOS.setLong(1, id);
ResultSet rs;
synchronized (GET_INFOS.getConnection()) {
rs = GET_INFOS.executeQuery();
}
while (rs.next()) {
synchronized (SQLObjectContainer.yaml) {
infos.put(rs.getString("key"), SQLObjectContainer.yaml
.load(rs.getString("info")));
}
}
rs.close();
} catch (final SQLException e) {
ACLogger.severe(
"Problem with getting informations from the DB", e);
}
}
synchronized (GET_KIT_USES) {
try {
GET_KIT_USES.clearParameters();
GET_KIT_USES.setLong(1, id);
ResultSet rs;
synchronized (GET_KIT_USES.getConnection()) {
rs = GET_KIT_USES.executeQuery();
}
while (rs.next()) {
kitUses.put(rs.getString("kit"), rs.getLong("use"));
}
rs.close();
} catch (final SQLException e) {
ACLogger.severe("Problem with getting kit uses from the DB", e);
}
}
}
| private void init() {
synchronized (GET_LASTLOC) {
try {
GET_LASTLOC.clearParameters();
GET_LASTLOC.setLong(1, id);
ResultSet rs;
synchronized (GET_LASTLOC.getConnection()) {
rs = GET_LASTLOC.executeQuery();
}
if (rs.next()) {
final String worldName = rs.getString("world");
if (worldName != null && !worldName.isEmpty()) {
World world = Bukkit.getWorld(worldName);
if (world == null) {
world = ACWorld.getWorld(worldName).getHandle();
}
if (world != null) {
lastLoc = new Location(world, rs.getDouble("x"),
rs.getDouble("y"), rs.getDouble("z"),
rs.getFloat("yaw"), rs.getFloat("pitch"));
} else {
ACLogger.warning("The World " + worldName
+ " is not loaded");
}
}
}
} catch (final SQLException e) {
ACLogger.severe(
"Problem with getting last location from the DB", e);
}
}
synchronized (GET_HOMES) {
try {
GET_HOMES.clearParameters();
GET_HOMES.setLong(1, id);
ResultSet rs;
synchronized (GET_HOMES.getConnection()) {
rs = GET_HOMES.executeQuery();
}
while (rs.next()) {
final String worldName = rs.getString("world");
World world = Bukkit.getWorld(worldName);
if (world == null) {
world = ACWorld.getWorld(world).getHandle();
}
if (world != null) {
homes.put(
rs.getString("name"),
new Location(world, rs.getDouble("x"), rs
.getDouble("y"), rs.getDouble("z"), rs
.getFloat("yaw"), rs.getFloat("pitch")));
} else {
ACLogger.warning("The World " + worldName
+ " is not loaded");
}
}
rs.close();
} catch (final SQLException e) {
ACLogger.severe("Problem with getting homes from the DB", e);
}
}
synchronized (GET_POWERS) {
try {
GET_POWERS.clearParameters();
GET_POWERS.setLong(1, id);
ResultSet rs;
synchronized (GET_POWERS.getConnection()) {
rs = GET_POWERS.executeQuery();
}
while (rs.next()) {
final String powerName = rs.getString("key");
final Type power = Type.matchType(powerName);
synchronized (SQLObjectContainer.yaml) {
if (power == null) {
customPowers.put(powerName, SQLObjectContainer.yaml
.load(rs.getString("info")));
} else {
powers.put(power, SQLObjectContainer.yaml.load(rs
.getString("info")));
}
}
}
rs.close();
} catch (final SQLException e) {
ACLogger.severe("Problem with getting powers from the DB", e);
}
}
synchronized (GET_INFOS) {
try {
GET_INFOS.clearParameters();
GET_INFOS.setLong(1, id);
ResultSet rs;
synchronized (GET_INFOS.getConnection()) {
rs = GET_INFOS.executeQuery();
}
while (rs.next()) {
synchronized (SQLObjectContainer.yaml) {
infos.put(rs.getString("key"), SQLObjectContainer.yaml
.load(rs.getString("info")));
}
}
rs.close();
} catch (final SQLException e) {
ACLogger.severe(
"Problem with getting informations from the DB", e);
}
}
synchronized (GET_KIT_USES) {
try {
GET_KIT_USES.clearParameters();
GET_KIT_USES.setLong(1, id);
ResultSet rs;
synchronized (GET_KIT_USES.getConnection()) {
rs = GET_KIT_USES.executeQuery();
}
while (rs.next()) {
kitUses.put(rs.getString("kit"), rs.getLong("use"));
}
rs.close();
} catch (final SQLException e) {
ACLogger.severe("Problem with getting kit uses from the DB", e);
}
}
}
|
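A note on the record above: the one-token fix replaces ACWorld.getWorld(world) with ACWorld.getWorld(worldName). The buggy fallback dereferenced the very reference that had just been found to be null, so it could only throw a NullPointerException; the fix queries the plugin's world registry by name instead. The identical getWorld(world) call in the GET_HOMES block is left untouched by this diff, so only the last-location path is repaired. The corrected lookup order in isolation, as a hypothetical helper (Bukkit, World, and ACWorld are the plugin's own types):

private static World resolveWorld(final String worldName) {
    // Try Bukkit's registry first.
    World world = Bukkit.getWorld(worldName);
    if (world == null) {
        // Fall back to the plugin registry BY NAME. The buggy version
        // passed the null 'world' reference here, guaranteeing an NPE.
        world = ACWorld.getWorld(worldName).getHandle();
    }
    return world;
}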
diff --git a/src/main/org/testng/internal/BaseTestMethod.java b/src/main/org/testng/internal/BaseTestMethod.java
index 9c171f57..3ba14f08 100644
--- a/src/main/org/testng/internal/BaseTestMethod.java
+++ b/src/main/org/testng/internal/BaseTestMethod.java
@@ -1,599 +1,599 @@
package org.testng.internal;
import java.lang.reflect.Method;
import java.util.Comparator;
import java.util.HashMap;
import java.util.Map;
import org.testng.IClass;
import org.testng.IRetryAnalyzer;
import org.testng.ITestClass;
import org.testng.ITestNGMethod;
import org.testng.internal.annotations.IAnnotationFinder;
import org.testng.internal.annotations.ITestOrConfiguration;
import org.testng.internal.thread.IAtomicInteger;
import org.testng.internal.thread.ThreadUtil;
/**
* Superclass to represent both @Test and @Configuration methods.
*/
public abstract class BaseTestMethod implements ITestNGMethod {
/** The test class on which the test method was found. Note that this is not
* necessarily the declaring class. */
protected ITestClass m_testClass;
protected final transient Class<?> m_methodClass;
protected final transient Method m_method;
protected String m_id = "";
protected long m_date = System.currentTimeMillis();
protected final transient IAnnotationFinder m_annotationFinder;
protected String[] m_groups = {};
protected String[] m_groupsDependedUpon = {};
protected String[] m_methodsDependedUpon = {};
protected String[] m_beforeGroups = {};
protected String[] m_afterGroups = {};
private boolean m_isAlwaysRun;
// Methods are not serialized, but we can serialize their hashCode
private final String m_signature;
private final String m_methodName;
// If a depended group is not found
private String m_missingGroup;
private String m_description = null;
protected IAtomicInteger m_currentInvocationCount = ThreadUtil.createAtomicInteger(0);
private int m_parameterInvocationCount = 1;
private IRetryAnalyzer retryAnalyzer = null;
/**
* Constructs a <code>BaseTestMethod</code> TODO cquezel JavaDoc.
*
* @param method
* @param annotationFinder
*/
public BaseTestMethod(Method method, IAnnotationFinder annotationFinder) {
m_methodClass = method.getDeclaringClass();
m_method = method;
m_methodName = m_method.getName();
m_annotationFinder = annotationFinder;
m_signature = initSignature();
}
/**
* {@inheritDoc}
*/
public boolean isAlwaysRun() {
return m_isAlwaysRun;
}
/**
* TODO cquezel JavaDoc.
*
* @param alwaysRun
*/
protected void setAlwaysRun(boolean alwaysRun) {
m_isAlwaysRun = alwaysRun;
}
/**
* {@inheritDoc}
*/
public Class<?> getRealClass() {
return m_methodClass;
}
/**
* {@inheritDoc}
*/
public ITestClass getTestClass() {
return m_testClass;
}
/**
* {@inheritDoc}
*/
public void setTestClass(ITestClass tc) {
assert null != tc;
if (! tc.getRealClass().equals(m_method.getDeclaringClass())) {
assert m_method.getDeclaringClass().isAssignableFrom(tc.getRealClass()) :
"\nMISMATCH : " + tc.getRealClass() + " " + m_method.getDeclaringClass();
}
m_testClass = tc;
}
/**
* TODO cquezel JavaDoc.
*
* @param o
* @return
*/
public int compareTo(Object o) {
int result = -2;
Class<?> thisClass = getRealClass();
Class<?> otherClass = ((ITestNGMethod) o).getRealClass();
if (thisClass.isAssignableFrom(otherClass))
result = -1;
else if (otherClass.isAssignableFrom(thisClass))
result = 1;
else if (equals(o))
result = 0;
return result;
}
/**
* {@inheritDoc}
*/
public Method getMethod() {
return m_method;
}
/**
* {@inheritDoc}
*/
public String getMethodName() {
return m_methodName;
}
/**
* {@inheritDoc}
*/
public Object[] getInstances() {
return m_testClass.getInstances(false);
}
/**
* {@inheritDoc}
*/
public long[] getInstanceHashCodes() {
return m_testClass.getInstanceHashCodes();
}
/**
* {@inheritDoc}
* @return the addition of groups defined on the class and on this method.
*/
public String[] getGroups() {
return m_groups;
}
/**
* {@inheritDoc}
*/
public String[] getGroupsDependedUpon() {
return m_groupsDependedUpon;
}
/**
* {@inheritDoc}
*/
public String[] getMethodsDependedUpon() {
return m_methodsDependedUpon;
}
/**
* {@inheritDoc}
*/
public boolean isTest() {
return false;
}
/**
* {@inheritDoc}
*/
public boolean isBeforeSuiteConfiguration() {
return false;
}
/**
* {@inheritDoc}
*/
public boolean isAfterSuiteConfiguration() {
return false;
}
/**
* {@inheritDoc}
*/
public boolean isBeforeTestConfiguration() {
return false;
}
/**
* {@inheritDoc}
*/
public boolean isAfterTestConfiguration() {
return false;
}
/**
* {@inheritDoc}
*/
public boolean isBeforeGroupsConfiguration() {
return false;
}
/**
* {@inheritDoc}
*/
public boolean isAfterGroupsConfiguration() {
return false;
}
/**
* {@inheritDoc}
*/
public boolean isBeforeClassConfiguration() {
return false;
}
/**
* {@inheritDoc}
*/
public boolean isAfterClassConfiguration() {
return false;
}
/**
* {@inheritDoc}
*/
public boolean isBeforeMethodConfiguration() {
return false;
}
/**
* {@inheritDoc}
*/
public boolean isAfterMethodConfiguration() {
return false;
}
/**
* {@inheritDoc}
*/
public long getTimeOut() {
return 0L;
}
/**
* {@inheritDoc}
* @return the number of times this method needs to be invoked.
*/
public int getInvocationCount() {
return 1;
}
/**
* No-op.
*/
public void setInvocationCount(int counter) {
}
/**
* {@inheritDoc} Default value for successPercentage.
*/
public int getSuccessPercentage() {
return 100;
}
/**
* {@inheritDoc}
*/
public String getId() {
return m_id;
}
/**
* {@inheritDoc}
*/
public void setId(String id) {
m_id = id;
}
/**
* {@inheritDoc}
* @return Returns the date.
*/
public long getDate() {
return m_date;
}
/**
* {@inheritDoc}
* @param date The date to set.
*/
public void setDate(long date) {
m_date = date;
}
/**
* {@inheritDoc}
*/
public boolean canRunFromClass(IClass testClass) {
return m_methodClass.isAssignableFrom(testClass.getRealClass());
}
/**
* {@inheritDoc} Compares two BaseTestMethod using the test class then the associated
* Java Method.
*/
@Override
public boolean equals(Object obj) {
// TODO CQ document why this try block exists.
try {
BaseTestMethod other = (BaseTestMethod) obj;
boolean isEqual = m_testClass == null ? other.m_testClass == null
: m_testClass.getRealClass().equals(other.m_testClass.getRealClass());
return isEqual && m_method.equals(other.m_method);
}
catch(Exception ex) {
return false;
}
}
/**
* {@inheritDoc} This implementation returns the associated Java Method's hash code.
* @Return the associated Java Method's hash code.
*/
@Override
public int hashCode() {
return m_method.hashCode();
}
/**
* TODO cquezel JavaDoc.
*
* @param annotationClass
*/
protected void initGroups(Class<?> annotationClass) {
//
// Init groups
//
{
ITestOrConfiguration annotation =
(ITestOrConfiguration) getAnnotationFinder().findAnnotation(getMethod(),
annotationClass);
ITestOrConfiguration classAnnotation =
(ITestOrConfiguration) getAnnotationFinder().findAnnotation(getMethod().getDeclaringClass(),
annotationClass);
setGroups(getStringArray(null != annotation ? annotation.getGroups() : null,
null != classAnnotation ? classAnnotation.getGroups() : null));
}
//
// Init groups depended upon
//
{
ITestOrConfiguration annotation =
(ITestOrConfiguration) getAnnotationFinder().findAnnotation(getMethod(),
annotationClass);
ITestOrConfiguration classAnnotation =
(ITestOrConfiguration) getAnnotationFinder().findAnnotation(getMethod().getDeclaringClass(),
annotationClass);
setGroupsDependedUpon(
getStringArray(null != annotation ? annotation.getDependsOnGroups() : null,
null != classAnnotation ? classAnnotation.getDependsOnGroups() : null));
String[] methodsDependedUpon =
getStringArray(null != annotation ? annotation.getDependsOnMethods() : null,
null != classAnnotation ? classAnnotation.getDependsOnMethods() : null);
// Qualify these methods if they don't have a package
for (int i = 0; i < methodsDependedUpon.length; i++) {
String m = methodsDependedUpon[i];
if (m.indexOf(".") < 0) {
- methodsDependedUpon[i] =
- MethodHelper.calculateMethodCanonicalName(m_methodClass, methodsDependedUpon[i]);
+ m = MethodHelper.calculateMethodCanonicalName(m_methodClass, methodsDependedUpon[i]);
+ methodsDependedUpon[i] = m != null ? m : methodsDependedUpon[i];
}
}
setMethodsDependedUpon(methodsDependedUpon);
}
}
/**
* TODO cquezel JavaDoc.
*
* @return
*/
protected IAnnotationFinder getAnnotationFinder() {
return m_annotationFinder;
}
/**
* TODO cquezel JavaDoc.
*
* @return
*/
protected IClass getIClass() {
return m_testClass;
}
/**
* TODO cquezel JavaDoc.
*
* @return
*/
protected String getSignature() {
return m_signature;
}
/**
* TODO cquezel JavaDoc.
*
* @return
*/
private String initSignature() {
Method m = getMethod();
String cls = m.getDeclaringClass().getName();
StringBuffer result = new StringBuffer(cls + "." + m.getName() + "(");
int i = 0;
for (Class<?> p : m.getParameterTypes()) {
if (i++ > 0) result.append(", ");
result.append(p.getName());
}
result.append(")");
return result.toString();
}
/**
* {@inheritDoc}
*/
@Override
public String toString() {
return getSignature();
}
/**
* TODO cquezel JavaDoc.
*
* @param methodArray
* @param classArray
* @return
*/
protected String[] getStringArray(String[] methodArray, String[] classArray) {
Map<String, String> vResult = new HashMap<String, String>();
if (null != methodArray) {
for (String m : methodArray) {
vResult.put(m, m);
}
}
if (null != classArray) {
for (String m : classArray) {
vResult.put(m, m);
}
}
return vResult.values().toArray(new String[vResult.size()]);
}
protected void setGroups(String[] groups) {
m_groups = groups;
}
protected void setGroupsDependedUpon(String[] groups) {
m_groupsDependedUpon = groups;
}
protected void setMethodsDependedUpon(String[] methods) {
m_methodsDependedUpon = methods;
}
/**
* {@inheritDoc}
*/
public void addMethodDependedUpon(String method) {
String[] newMethods = new String[m_methodsDependedUpon.length + 1];
newMethods[0] = method;
for (int i =1; i < newMethods.length; i++) {
newMethods[i] = m_methodsDependedUpon[i - 1];
}
m_methodsDependedUpon = newMethods;
}
private static void ppp(String s) {
System.out.println("[BaseTestMethod] " + s);
}
/** Compares two ITestNGMethod by date. */
public static final Comparator<?> DATE_COMPARATOR = new Comparator<Object>() {
public int compare(Object o1, Object o2) {
try {
ITestNGMethod m1 = (ITestNGMethod) o1;
ITestNGMethod m2 = (ITestNGMethod) o2;
return (int) (m1.getDate() - m2.getDate());
}
catch(Exception ex) {
return 0; // TODO CQ document this logic
}
}
};
/**
* {@inheritDoc}
*/
public String getMissingGroup() {
return m_missingGroup;
}
/**
* {@inheritDoc}
*/
public void setMissingGroup(String group) {
m_missingGroup = group;
}
/**
* {@inheritDoc}
*/
public int getThreadPoolSize() {
return 0;
}
/**
* No-op.
* @param threadPoolSize
*/
public void setThreadPoolSize(int threadPoolSize) {
}
/**
* TODO cquezel JavaDoc.
*
* @param description
*/
public void setDescription(String description) {
m_description = description;
}
/**
* {@inheritDoc}
*/
public String getDescription() {
return m_description;
}
/**
* {@inheritDoc}
*/
public String[] getBeforeGroups() {
return m_beforeGroups;
}
/**
* {@inheritDoc}
*/
public String[] getAfterGroups() {
return m_afterGroups;
}
public void incrementCurrentInvocationCount() {
m_currentInvocationCount.incrementAndGet();
}
public int getCurrentInvocationCount() {
return m_currentInvocationCount.get();
}
public void setParameterInvocationCount(int n) {
m_parameterInvocationCount = n;
}
public int getParameterInvocationCount() {
return m_parameterInvocationCount;
}
public abstract ITestNGMethod clone();
public IRetryAnalyzer getRetryAnalyzer() {
return retryAnalyzer;
}
public void setRetryAnalyzer(IRetryAnalyzer retryAnalyzer) {
this.retryAnalyzer = retryAnalyzer;
}
}
| true | true | protected void initGroups(Class<?> annotationClass) {
//
// Init groups
//
{
ITestOrConfiguration annotation =
(ITestOrConfiguration) getAnnotationFinder().findAnnotation(getMethod(),
annotationClass);
ITestOrConfiguration classAnnotation =
(ITestOrConfiguration) getAnnotationFinder().findAnnotation(getMethod().getDeclaringClass(),
annotationClass);
setGroups(getStringArray(null != annotation ? annotation.getGroups() : null,
null != classAnnotation ? classAnnotation.getGroups() : null));
}
//
// Init groups depended upon
//
{
ITestOrConfiguration annotation =
(ITestOrConfiguration) getAnnotationFinder().findAnnotation(getMethod(),
annotationClass);
ITestOrConfiguration classAnnotation =
(ITestOrConfiguration) getAnnotationFinder().findAnnotation(getMethod().getDeclaringClass(),
annotationClass);
setGroupsDependedUpon(
getStringArray(null != annotation ? annotation.getDependsOnGroups() : null,
null != classAnnotation ? classAnnotation.getDependsOnGroups() : null));
String[] methodsDependedUpon =
getStringArray(null != annotation ? annotation.getDependsOnMethods() : null,
null != classAnnotation ? classAnnotation.getDependsOnMethods() : null);
// Qualify these methods if they don't have a package
for (int i = 0; i < methodsDependedUpon.length; i++) {
String m = methodsDependedUpon[i];
if (m.indexOf(".") < 0) {
methodsDependedUpon[i] =
MethodHelper.calculateMethodCanonicalName(m_methodClass, methodsDependedUpon[i]);
}
}
setMethodsDependedUpon(methodsDependedUpon);
}
}
| protected void initGroups(Class<?> annotationClass) {
//
// Init groups
//
{
ITestOrConfiguration annotation =
(ITestOrConfiguration) getAnnotationFinder().findAnnotation(getMethod(),
annotationClass);
ITestOrConfiguration classAnnotation =
(ITestOrConfiguration) getAnnotationFinder().findAnnotation(getMethod().getDeclaringClass(),
annotationClass);
setGroups(getStringArray(null != annotation ? annotation.getGroups() : null,
null != classAnnotation ? classAnnotation.getGroups() : null));
}
//
// Init groups depended upon
//
{
ITestOrConfiguration annotation =
(ITestOrConfiguration) getAnnotationFinder().findAnnotation(getMethod(),
annotationClass);
ITestOrConfiguration classAnnotation =
(ITestOrConfiguration) getAnnotationFinder().findAnnotation(getMethod().getDeclaringClass(),
annotationClass);
setGroupsDependedUpon(
getStringArray(null != annotation ? annotation.getDependsOnGroups() : null,
null != classAnnotation ? classAnnotation.getDependsOnGroups() : null));
String[] methodsDependedUpon =
getStringArray(null != annotation ? annotation.getDependsOnMethods() : null,
null != classAnnotation ? classAnnotation.getDependsOnMethods() : null);
// Qualify these methods if they don't have a package
for (int i = 0; i < methodsDependedUpon.length; i++) {
String m = methodsDependedUpon[i];
if (m.indexOf(".") < 0) {
m = MethodHelper.calculateMethodCanonicalName(m_methodClass, methodsDependedUpon[i]);
methodsDependedUpon[i] = m != null ? m : methodsDependedUpon[i];
}
}
setMethodsDependedUpon(methodsDependedUpon);
}
}
|
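A note on the record above: MethodHelper.calculateMethodCanonicalName can evidently return null, and the buggy loop stored that null straight into methodsDependedUpon, corrupting the dependency list. The fix resolves into a local first and keeps the original unqualified name whenever resolution fails. The same keep-on-null idiom in isolation, with a hypothetical resolver standing in for MethodHelper:

static void qualifyInPlace(String[] names,
        java.util.function.UnaryOperator<String> resolver) {
    for (int i = 0; i < names.length; i++) {
        if (names[i].indexOf('.') < 0) {
            String qualified = resolver.apply(names[i]);
            // Keep the original entry when resolution fails, rather
            // than overwriting it with null and breaking later lookups.
            names[i] = qualified != null ? qualified : names[i];
        }
    }
}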
diff --git a/software/camod/src/gov/nih/nci/camod/webapp/action/ViewTOCSearchResultsAction.java b/software/camod/src/gov/nih/nci/camod/webapp/action/ViewTOCSearchResultsAction.java
index ea1a5a7a..620ef852 100755
--- a/software/camod/src/gov/nih/nci/camod/webapp/action/ViewTOCSearchResultsAction.java
+++ b/software/camod/src/gov/nih/nci/camod/webapp/action/ViewTOCSearchResultsAction.java
@@ -1,161 +1,163 @@
/**
*
* $Id: ViewTOCSearchResultsAction.java,v 1.12 2009-06-11 17:42:53 pandyas Exp $
*
* $Log: not supported by cvs2svn $
* Revision 1.11 2009/06/01 17:02:53 pandyas
* getting ready for QA build
*
* Revision 1.10 2008/10/22 18:19:07 schroedn
* Removed the criteria table data from the search results for TOC
*
* Revision 1.9 2008/07/28 17:20:47 pandyas
* Modifed to prevent SQL inject - added HTTP Header
* App scan performed on July 24, 2008
*
* Revision 1.8 2008/07/15 15:18:48 pandyas
* minor change
*
* Revision 1.7 2008/07/15 15:18:26 pandyas
* Modified to prevent SQL injection
* Scan conducted on July 14 2008
*
* Revision 1.6 2008/05/27 14:58:04 pandyas
* Removed debug statements
*
* Revision 1.5 2008/05/27 14:52:52 pandyas
* Modified to prevent SQL injection
* Cleaned parameter name before proceeding
* Re: Apps Scan run 05/23/2008
*
* Revision 1.4 2008/05/21 19:06:53 pandyas
* Modified TOC action to prevent SQL injection
* Re: Apps Scan run 05/15/2008
*
* Revision 1.3 2006/04/17 19:09:40 pandyas
* caMod 2.1 OM changes
*
*
*/
package gov.nih.nci.camod.webapp.action;
import gov.nih.nci.camod.Constants;
import gov.nih.nci.camod.service.impl.*;
import gov.nih.nci.camod.util.NameValueList;
import gov.nih.nci.camod.util.SafeHTMLUtil;
import java.util.Enumeration;
import java.util.List;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.struts.action.*;
public class ViewTOCSearchResultsAction extends BaseAction {
public ActionForward execute(ActionMapping mapping, ActionForm form, HttpServletRequest request,
HttpServletResponse response) throws Exception {
log.info("In ViewTOCSearchResultsAction.execute");
// get and clean header to prevent SQL injection
String sID = null;
if (request.getHeader("X-Forwarded-For") != null){
sID = request.getHeader("X-Forwarded-For");
log.info("cleaned X-Forwarded-For: " + sID);
sID = SafeHTMLUtil.clean(sID);
}
sID = request.getHeader("Referer");
// prevents Referer Header injection
- if ( sID != null && sID != "" && !sID.contains("camod")) {
+ if ( sID != null && sID != "" && sID.contains("camod")) {}
+ if ( sID != null && sID != "" && sID.contains("emice")) {}
+ else {
return (mapping.findForward("failure"));
}
// Clean all headers for security scan (careful about what chars you allow)
String headername = "";
for(Enumeration e = request.getHeaderNames(); e.hasMoreElements();){
headername = (String)e.nextElement();
log.info("ViewTOCSearchResultsAction headername: " + headername);
String cleanHeaders = SafeHTMLUtil.clean(headername);
log.debug("ViewTOCSearchResultsAction cleaned headername: " + headername);
}
String theForward = "next";
try {
// clean TOCQUERYKEY ahead of try loop - then loop checks if it is a valid choice - security scan code
String theKey = (String) request.getParameter(Constants.Parameters.TOCQUERYKEY);
if (theKey != null && theKey.length() > 0) {
theKey = SafeHTMLUtil.clean(theKey);
log.info("ViewTOCSearchResultsAction cleaned theKey: " + theKey);
}
//Remove any retained criteriatable values
request.getSession().setAttribute(Constants.CRITERIATABLE, "" );
request.getSession().setAttribute(Constants.NOSAVEOPTION, "false");
// This is meant to prevent SQL injection into the key value of the TOC queries
// Security scan failed so this is checked first and the query will not run unless clean
if (theKey != null && theKey.length() > 0)
{
NameValueList.generateTableOfContentsList();
request.getSession().setAttribute(Constants.Dropdowns.SEARCHTOCDROP, NameValueList.getTableOfContentsList());
if (!SafeHTMLUtil.isValidValue(theKey,Constants.Dropdowns.SEARCHTOCDROP,request))
{
// set theForward to failure - fail gracefully but do not let query continue
System.out.println("TOC: The key is not an accepted value - end query " + theKey);
theForward = "failure";
} else {
log.debug("theKey is a valid value - continue with querry: " + theKey);
// Handle external linkage
if (request.getSession().getAttribute(Constants.TOCSearch.TOC_QUERY_RESULTS) == null) {
// Get the TOC manager workflow
TOCManager theTOCManager = new TOCManager(getServlet().getServletContext().getRealPath("/")
+ Constants.TOCSearch.TOC_QUERY_FILE);
List theResults = theTOCManager.process();
log.debug("TOC: " + theResults);
request.getSession().setAttribute(Constants.TOCSearch.TOC_QUERY_RESULTS, theResults);
}
List theGroupList = (List) request.getSession().getAttribute(Constants.TOCSearch.TOC_QUERY_RESULTS);
log.debug("theGroupList: " + theGroupList);
for (int i = 0; i < theGroupList.size(); i++) {
TOCQueryGroup theQueryGroup = (TOCQueryGroup) theGroupList.get(i);
List theQueryList = theQueryGroup.getQueries();
for (int j = 0; j < theQueryList.size(); j++) {
TOCQuery theQuery = (TOCQuery) theQueryList.get(j);
if (theQuery.getKey().equals(theKey)) {
request.getSession().setAttribute(Constants.SEARCH_RESULTS, theQuery.getResults());
log.info("TOC theQuery.getResults(): " + theQuery.getResults());
break;
}
}
}
} // end of SafeHTMLUtil.isValidValue
} // end of theKey != null
} catch (Exception e) {
theForward = "failure";
log.debug("Caught an error running the canned query: ", e);
// Encountered an error saving the model.
ActionMessages theMsg = new ActionMessages();
theMsg.add(ActionMessages.GLOBAL_MESSAGE, new ActionMessage("errors.admin.message"));
saveErrors(request, theMsg);
}
log.info("Exiting ViewTOCSearchResultsAction theForward: "+ theForward);
return mapping.findForward(theForward);
}
}
| true | true | public ActionForward execute(ActionMapping mapping, ActionForm form, HttpServletRequest request,
HttpServletResponse response) throws Exception {
log.info("In ViewTOCSearchResultsAction.execute");
// get and clean header to prevent SQL injection
String sID = null;
if (request.getHeader("X-Forwarded-For") != null){
sID = request.getHeader("X-Forwarded-For");
log.info("cleaned X-Forwarded-For: " + sID);
sID = SafeHTMLUtil.clean(sID);
}
sID = request.getHeader("Referer");
// prevents Referer Header injection
if ( sID != null && sID != "" && !sID.contains("camod")) {
return (mapping.findForward("failure"));
}
// Clean all headers for security scan (careful about what chars you allow)
String headername = "";
for(Enumeration e = request.getHeaderNames(); e.hasMoreElements();){
headername = (String)e.nextElement();
log.info("ViewTOCSearchResultsAction headername: " + headername);
String cleanHeaders = SafeHTMLUtil.clean(headername);
log.debug("ViewTOCSearchResultsAction cleaned headername: " + headername);
}
String theForward = "next";
try {
// clean TOCQUERYKEY ahead of try loop - then loop checks if it is a valid choice - security scan code
String theKey = (String) request.getParameter(Constants.Parameters.TOCQUERYKEY);
if (theKey != null && theKey.length() > 0) {
theKey = SafeHTMLUtil.clean(theKey);
log.info("ViewTOCSearchResultsAction cleaned theKey: " + theKey);
}
//Remove any retained criteriatable values
request.getSession().setAttribute(Constants.CRITERIATABLE, "" );
request.getSession().setAttribute(Constants.NOSAVEOPTION, "false");
// This is meant to prevent SQL injection into the key value of the TOC queries
// Security scan failed so this is checked first and the query will not run unless clean
if (theKey != null && theKey.length() > 0)
{
NameValueList.generateTableOfContentsList();
request.getSession().setAttribute(Constants.Dropdowns.SEARCHTOCDROP, NameValueList.getTableOfContentsList());
if (!SafeHTMLUtil.isValidValue(theKey,Constants.Dropdowns.SEARCHTOCDROP,request))
{
// set theForward to failure - fail gracefully but do not let query continue
System.out.println("TOC: The key is not an accepted value - end query " + theKey);
theForward = "failure";
} else {
log.debug("theKey is a valid value - continue with querry: " + theKey);
// Handle external linkage
if (request.getSession().getAttribute(Constants.TOCSearch.TOC_QUERY_RESULTS) == null) {
// Get the TOC manager workflow
TOCManager theTOCManager = new TOCManager(getServlet().getServletContext().getRealPath("/")
+ Constants.TOCSearch.TOC_QUERY_FILE);
List theResults = theTOCManager.process();
log.debug("TOC: " + theResults);
request.getSession().setAttribute(Constants.TOCSearch.TOC_QUERY_RESULTS, theResults);
}
List theGroupList = (List) request.getSession().getAttribute(Constants.TOCSearch.TOC_QUERY_RESULTS);
log.debug("theGroupList: " + theGroupList);
for (int i = 0; i < theGroupList.size(); i++) {
TOCQueryGroup theQueryGroup = (TOCQueryGroup) theGroupList.get(i);
List theQueryList = theQueryGroup.getQueries();
for (int j = 0; j < theQueryList.size(); j++) {
TOCQuery theQuery = (TOCQuery) theQueryList.get(j);
if (theQuery.getKey().equals(theKey)) {
request.getSession().setAttribute(Constants.SEARCH_RESULTS, theQuery.getResults());
log.info("TOC theQuery.getResults(): " + theQuery.getResults());
break;
}
}
}
} // end of SafeHTMLUtil.isValidValue
} // end of theKey != null
} catch (Exception e) {
theForward = "failure";
log.debug("Caught an error running the canned query: ", e);
// Encountered an error saving the model.
ActionMessages theMsg = new ActionMessages();
theMsg.add(ActionMessages.GLOBAL_MESSAGE, new ActionMessage("errors.admin.message"));
saveErrors(request, theMsg);
}
log.info("Exiting ViewTOCSearchResultsAction theForward: "+ theForward);
return mapping.findForward(theForward);
}
| public ActionForward execute(ActionMapping mapping, ActionForm form, HttpServletRequest request,
HttpServletResponse response) throws Exception {
log.info("In ViewTOCSearchResultsAction.execute");
// get and clean header to prevent SQL injection
String sID = null;
if (request.getHeader("X-Forwarded-For") != null){
sID = request.getHeader("X-Forwarded-For");
log.info("cleaned X-Forwarded-For: " + sID);
sID = SafeHTMLUtil.clean(sID);
}
sID = request.getHeader("Referer");
// prevents Referer Header injection
if ( sID != null && sID != "" && sID.contains("camod")) {}
if ( sID != null && sID != "" && sID.contains("emice")) {}
else {
return (mapping.findForward("failure"));
}
// Clean all headers for security scan (careful about what chars you allow)
String headername = "";
for(Enumeration e = request.getHeaderNames(); e.hasMoreElements();){
headername = (String)e.nextElement();
log.info("ViewTOCSearchResultsAction headername: " + headername);
String cleanHeaders = SafeHTMLUtil.clean(headername);
log.debug("ViewTOCSearchResultsAction cleaned headername: " + headername);
}
String theForward = "next";
try {
// clean TOCQUERYKEY ahead of try loop - then loop checks if it is a valid choice - security scan code
String theKey = (String) request.getParameter(Constants.Parameters.TOCQUERYKEY);
if (theKey != null && theKey.length() > 0) {
theKey = SafeHTMLUtil.clean(theKey);
log.info("ViewTOCSearchResultsAction cleaned theKey: " + theKey);
}
//Remove any retained criteriatable values
request.getSession().setAttribute(Constants.CRITERIATABLE, "" );
request.getSession().setAttribute(Constants.NOSAVEOPTION, "false");
// This is meant to prevent SQL injection into the key value of the TOC queries
// Security scan failed so this is checked first and the query will not run unless clean
if (theKey != null && theKey.length() > 0)
{
NameValueList.generateTableOfContentsList();
request.getSession().setAttribute(Constants.Dropdowns.SEARCHTOCDROP, NameValueList.getTableOfContentsList());
if (!SafeHTMLUtil.isValidValue(theKey,Constants.Dropdowns.SEARCHTOCDROP,request))
{
// set theForward to failure - fail gracefully but do not let query continue
System.out.println("TOC: The key is not an accepted value - end query " + theKey);
theForward = "failure";
} else {
log.debug("theKey is a valid value - continue with querry: " + theKey);
// Handle external linkage
if (request.getSession().getAttribute(Constants.TOCSearch.TOC_QUERY_RESULTS) == null) {
// Get the TOC manager workflow
TOCManager theTOCManager = new TOCManager(getServlet().getServletContext().getRealPath("/")
+ Constants.TOCSearch.TOC_QUERY_FILE);
List theResults = theTOCManager.process();
log.debug("TOC: " + theResults);
request.getSession().setAttribute(Constants.TOCSearch.TOC_QUERY_RESULTS, theResults);
}
List theGroupList = (List) request.getSession().getAttribute(Constants.TOCSearch.TOC_QUERY_RESULTS);
log.debug("theGroupList: " + theGroupList);
for (int i = 0; i < theGroupList.size(); i++) {
TOCQueryGroup theQueryGroup = (TOCQueryGroup) theGroupList.get(i);
List theQueryList = theQueryGroup.getQueries();
for (int j = 0; j < theQueryList.size(); j++) {
TOCQuery theQuery = (TOCQuery) theQueryList.get(j);
if (theQuery.getKey().equals(theKey)) {
request.getSession().setAttribute(Constants.SEARCH_RESULTS, theQuery.getResults());
log.info("TOC theQuery.getResults(): " + theQuery.getResults());
break;
}
}
}
} // end of SafeHTMLUtil.isValidValue
} // end of theKey != null
} catch (Exception e) {
theForward = "failure";
log.debug("Caught an error running the canned query: ", e);
// Encountered an error saving the model.
ActionMessages theMsg = new ActionMessages();
theMsg.add(ActionMessages.GLOBAL_MESSAGE, new ActionMessage("errors.admin.message"));
saveErrors(request, theMsg);
}
log.info("Exiting ViewTOCSearchResultsAction theForward: "+ theForward);
return mapping.findForward(theForward);
}
|
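A note on the record above: the original guard rejected any Referer that did not contain "camod"; the fix tries to also admit "emice", but it does so with two empty-bodied if statements, and the else attaches only to the second if. The net effect is that only referers containing "emice" pass, while "camod"-only and missing referers now fall into the else and fail, which is unlikely to be the intent. Note also that sID != "" compares references, not contents. A clearer sketch of the apparent intent, an allow-list of referer substrings (the substrings come from the diff; the helper name is hypothetical):

private static boolean refererAllowed(final String referer) {
    // Reject missing or empty referers outright.
    if (referer == null || referer.isEmpty()) {
        return false;
    }
    // Admit any referer naming one of the trusted sites.
    return referer.contains("camod") || referer.contains("emice");
}

// Usage in execute():
// if (!refererAllowed(request.getHeader("Referer"))) {
//     return (mapping.findForward("failure"));
// }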
diff --git a/tests/org.jboss.tools.ui.bot.ext/src/org/jboss/tools/ui/bot/ext/SWTBotExt.java b/tests/org.jboss.tools.ui.bot.ext/src/org/jboss/tools/ui/bot/ext/SWTBotExt.java
index a746ee36a..46c2d5558 100644
--- a/tests/org.jboss.tools.ui.bot.ext/src/org/jboss/tools/ui/bot/ext/SWTBotExt.java
+++ b/tests/org.jboss.tools.ui.bot.ext/src/org/jboss/tools/ui/bot/ext/SWTBotExt.java
@@ -1,333 +1,338 @@
/*******************************************************************************
* Copyright (c) 2007-2009 Red Hat, Inc.
* Distributed under license by Red Hat, Inc. All rights reserved.
* This program is made available under the terms of the
* Eclipse Public License v1.0 which accompanies this distribution,
* and is available at http://www.eclipse.org/legal/epl-v10.html
*
* Contributor:
* Red Hat, Inc. - initial API and implementation
******************************************************************************/
package org.jboss.tools.ui.bot.ext;
import static org.eclipse.swtbot.swt.finder.matchers.WidgetMatcherFactory.allOf;
import static org.eclipse.swtbot.swt.finder.matchers.WidgetMatcherFactory.widgetOfType;
import static org.eclipse.swtbot.swt.finder.matchers.WidgetMatcherFactory.withStyle;
import static org.eclipse.swtbot.swt.finder.matchers.WidgetMatcherFactory.withText;
import static org.junit.Assert.fail;
import java.util.ArrayList;
import java.util.List;
import org.apache.log4j.Logger;
import org.eclipse.swt.SWT;
import org.eclipse.swt.browser.Browser;
import org.eclipse.swt.widgets.Button;
import org.eclipse.swt.widgets.Scale;
import org.eclipse.swtbot.eclipse.finder.SWTWorkbenchBot;
import org.eclipse.swtbot.eclipse.finder.widgets.SWTBotEditor;
import org.eclipse.swtbot.swt.finder.SWTBot;
import org.eclipse.swtbot.swt.finder.exceptions.WidgetNotFoundException;
import org.eclipse.swtbot.swt.finder.widgets.SWTBotButton;
import org.eclipse.swtbot.swt.finder.widgets.SWTBotCCombo;
import org.eclipse.swtbot.swt.finder.widgets.SWTBotCheckBox;
import org.eclipse.swtbot.swt.finder.widgets.SWTBotMenu;
import org.eclipse.swtbot.swt.finder.widgets.SWTBotShell;
import org.eclipse.swtbot.swt.finder.widgets.SWTBotTable;
import org.eclipse.swtbot.swt.finder.widgets.SWTBotTree;
import org.eclipse.ui.forms.widgets.Hyperlink;
import org.eclipse.ui.forms.widgets.Section;
import org.eclipse.ui.forms.widgets.Twistie;
import org.hamcrest.Matcher;
import org.jboss.tools.ui.bot.ext.parts.SWTBotBrowserExt;
import org.jboss.tools.ui.bot.ext.parts.SWTBotEditorExt;
import org.jboss.tools.ui.bot.ext.parts.SWTBotHyperlinkExt;
import org.jboss.tools.ui.bot.ext.parts.SWTBotScaleExt;
import org.jboss.tools.ui.bot.ext.parts.SWTBotTwistie;
import org.jboss.tools.ui.bot.ext.widgets.SWTBotSection;
/**
* Extended version of SWTWorkbenchBot, logging added
*
* @author jpeterka
*
*/
public class SWTBotExt extends SWTWorkbenchBot {
private Logger log = Logger.getLogger(SWTBotExt.class);
public void logAndFail(String msg) {
log.error(msg);
fail(msg);
}
// ------------------------------------------------------------
// SWTBot method wrapper ( for better logging mainly )
// ------------------------------------------------------------
@Override
public SWTBotMenu menu(String text) {
log.info("Menu \"" + text + "\" selected");
return super.menu(text);
}
@Override
public SWTBotButton button(String text) {
log.info("Button \"" + text + "\" selected");
return super.button(text);
}
@Override
public SWTBotTree tree() {
log.info("Tree selected");
return super.tree();
}
@Override
public SWTBotCCombo ccomboBox(String text) {
log.info("Combobox \"" + text + "\" selected");
return super.ccomboBox(text);
}
@Override
public SWTBotTable table() {
log.info("Table selected");
return super.table();
}
public void sleep(long ms, String msg) {
StackTraceElement[] ste = Thread.currentThread().getStackTrace();
if ((ste != null) && (ste[3] != null))
log.info("Bot sleeps for " + ms + " ms " + msg + " " + ste[3].toString());
else
log.info("Bot sleeps for " + ms + " ms " + msg);
super.sleep(ms);
}
@Override
public void sleep(long ms) {
sleep(ms,"");
}
public SWTBotEditorExt swtBotEditorExtByTitle(String fileName) {
SWTBotEditor editor = super.editorByTitle(fileName);
return new SWTBotEditorExt(editor.toTextEditor().getReference(),
(SWTWorkbenchBot) this);
}
@SuppressWarnings("unchecked")
public SWTBotBrowserExt browserExt() {
try {
List<Browser> bsrs = (List<Browser>) widgets(widgetOfType(Browser.class));
return new SWTBotBrowserExt(bsrs.get(0));
} catch (WidgetNotFoundException ex) {
throw new WidgetNotFoundException(
"Could not find widget of type Browser", ex);
}
}
@SuppressWarnings("unchecked")
public SWTBotBrowserExt browserByTitle(String title) {
SWTBotEditor editor = editorByTitle(title);
try {
List<Browser> bsrs = (List<Browser>) editor.bot().widgets(
widgetOfType(Browser.class));
return new SWTBotBrowserExt(bsrs.get(0));
} catch (WidgetNotFoundException ex) {
throw new WidgetNotFoundException(
"Could not find widget of type Browser", ex);
}
}
public SWTBotScaleExt scaleExt() {
return scaleExt(0);
}
@SuppressWarnings("unchecked")
public SWTBotScaleExt scaleExt(int index) {
try {
List<Scale> bsrs = (List<Scale>) widgets(widgetOfType(Scale.class));
return new SWTBotScaleExt(bsrs.get(index));
} catch (WidgetNotFoundException ex) {
throw new WidgetNotFoundException(
"Could not find widget of type Browser", ex);
}
}
@SuppressWarnings("unchecked")
public SWTBotHyperlinkExt hyperlink(String text) {
try {
List<Hyperlink> bsrs = (List<Hyperlink>) widgets(allOf(widgetOfType(Hyperlink.class),withText(text)));
return new SWTBotHyperlinkExt(bsrs.get(0));
} catch (WidgetNotFoundException ex) {
throw new WidgetNotFoundException(
"Could not find widget of type Hyperlink", ex);
}
}
@SuppressWarnings("unchecked")
public SWTBotHyperlinkExt hyperlink(int index) {
try {
List<Hyperlink> bsrs = (List<Hyperlink>) widgets(widgetOfType(Hyperlink.class));
return new SWTBotHyperlinkExt(bsrs.get(index));
} catch (WidgetNotFoundException ex) {
throw new WidgetNotFoundException(
"Could not find widget of type Hyperlink", ex);
}
}
public SWTBotHyperlinkExt hyperlink() {
return hyperlink(0);
}
public SWTBotButton clickButton(String text) {
return button(text).click();
}
@SuppressWarnings("unchecked")
public SWTBotSection section(String label) {
try {
List<Section> sections = (List<Section>)widgets(allOf(withText(label),widgetOfType(Section.class)));
return new SWTBotSection(sections.get(0));
} catch (WidgetNotFoundException ex) {
throw new WidgetNotFoundException(
"Could not find widget of type Section", ex);
}
}
@SuppressWarnings("unchecked")
public SWTBotSection section(SWTBot bot, String label) {
try {
List<Section> sections = (List<Section>)bot.widgets(allOf(withText(label),widgetOfType(Section.class)));
return new SWTBotSection(sections.get(0));
} catch (WidgetNotFoundException ex) {
throw new WidgetNotFoundException(
"Could not find widget of type Section", ex);
}
}
@SuppressWarnings("unchecked")
public SWTBotTwistie twistieByLabel(String label){
List<Twistie> list = (List<Twistie>)widgets(widgetOfType(Twistie.class));
SWTBotTwistie twistieBot = null;
for (Twistie twistie : list) {
twistieBot = new SWTBotTwistie(twistie);
if (twistieBot.getLabelText().equals(label)) return twistieBot;
}
//SWTBotTwistie twistie = new SWTBotTwistie((Twistie)widget(allOf(widgetOfType(Twistie.class), WidgetMatcherFactory.withLabel(label))));
//twistie.getId();
throw new WidgetNotFoundException("Widget of type Twistie with label \""+label+"\" was not found");
}
/**
* Waits for a shell with the given title.
* The maximum waiting time is 30 seconds.
*
* @param shellTitle Title of the shell to wait for.
* @return The shell it was waiting for, or <code>null</code>
* if the shell was not displayed within the waiting time.
* @see #waitForShell(String, int)
*/
public SWTBotShell waitForShell(final String shellTitle) {
return waitForShell(shellTitle, -1);
}
/**
* Waits for the desired shell with the given timeout
* and returns that shell once it is displayed.
*
* @param shellTitle Title of the desired shell.
* @param maxTimeout Maximum waiting time in seconds (the actual timeout can be one second more).
* A negative value means the default timeout (30 seconds).
* @return The shell it was waiting for, or <code>null</code>
* if the shell was not displayed within the waiting time.
*/
public SWTBotShell waitForShell(final String shellTitle, final int maxTimeout) {
if (shellTitle == null) {
throw new IllegalArgumentException("shellTitle cannot be null");
}
final int SLEEP_TIME = Timing.time2S();
final int ATTEMPTS_TIMEOUT = getAttemptsTimeout((maxTimeout < 0 ? 30 : maxTimeout), SLEEP_TIME);
for (int i = 0; i <= ATTEMPTS_TIMEOUT; i++) {
for (SWTBotShell shell : shells()) {
- if (shellTitle.equals(shell.getText())) {
- return shell;
+ try {
+ if (shellTitle.equals(shell.getText())) {
+ return shell;
+ }
+ } catch (WidgetNotFoundException wnfe) {
+ log.info("Waiting for shell: " + wnfe.getMessage());
+ // do nothing more but continue in loop
}
}
if (i < ATTEMPTS_TIMEOUT) {
sleep(SLEEP_TIME);
}
}
return null;
}
/**
* Waits until there is the desired number of shells.
* This is sometimes useful when you cannot address exactly
* what you are waiting for, but you know that the right situation
* is reached when there are <code>desiredNumberOfShells</code> shells.
* For example, you are waiting for some dialog to close: while it is open
* there are two shells, and after it closes only the one desired shell remains.
*
* @param desiredNumberOfShells Number of desired shells.
* @param maxTimeout Maximum time to wait in seconds. If this value is negative,
* the default timeout is used (30 seconds).
* @return Shells that exist after the timeout or after the desired
* number of shells is reached.
*/
public SWTBotShell[] waitForNumberOfShells(final int desiredNumberOfShells, final int maxTimeout) {
final int SLEEP_TIME = Timing.time2S();
final int ATTEMPTS_TIMEOUT = getAttemptsTimeout((maxTimeout < 0 ? 30 : maxTimeout), SLEEP_TIME);
for (int i = 0; i < ATTEMPTS_TIMEOUT; i++) {
if (shells().length != desiredNumberOfShells) {
sleep(SLEEP_TIME);
} else {
break;
}
}
return shells();
}
/**
* Waits until the desired number of shells exists,
* using the default timeout (30 seconds).
*
* @param desiredNumberOfShells Number of desired shells.
* @return Shells that exist after the timeout or after the desired
* number of shells is reached.
*/
public SWTBotShell[] waitForNumberOfShells(final int desiredNumberOfShells) {
return waitForNumberOfShells(desiredNumberOfShells, -1);
}
/**
* If you have a loop that tests some condition and you want to know
* how many iterations it should run before the timeout, give this method the
* <code>maxTimeout</code> in seconds and the desired delay between iterations,
* and it returns the number of iterations that should be performed.
* For example, a 30 second timeout with a 2000 ms sleep yields
* ceil(30000 / 2000) = 15 attempts.
*
* @param maxTimeout Maximum waiting time in seconds.
* @param sleepTime Waiting time in milliseconds between attempts.
* @return Number of attempts, approximately equal to <code>maxTimeout</code>
* with <code>sleepTime</code> spent in each loop.
*/
private int getAttemptsTimeout(final int maxTimeout, final int sleepTime) {
return ((int)Math.ceil((double)(maxTimeout * 1000) / (double)sleepTime));
}
/**
* Returns the currently displayed checkboxes.
* Disabled checkboxes are included as well,
* so it is necessary to check their state before using them.
*
* @return List of currently displayed checkboxes.
*/
@SuppressWarnings({"unchecked", "rawtypes"})
public List<SWTBotCheckBox> checkBoxes() {
List<SWTBotCheckBox> checkBoxes = new ArrayList<SWTBotCheckBox>();
Matcher matcher = allOf(widgetOfType(Button.class), withStyle(SWT.CHECK, "SWT.CHECK"));
for (Object widget : widgets(matcher)) {
if (widget != null) {
checkBoxes.add(new SWTBotCheckBox((Button) widget));
}
}
return checkBoxes;
}
}
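The checkBoxes() helper above deliberately returns disabled checkboxes too, so callers are expected to filter. A minimal usage sketch under that contract (the SWTBotExt class name is an assumption for illustration; isEnabled(), isChecked() and click() are standard SWTBotCheckBox methods):

import org.eclipse.swtbot.swt.finder.widgets.SWTBotCheckBox;

public class CheckBoxSweepExample {

    // Assumed to be an instance of the extended bot class defined above.
    private final SWTBotExt bot = new SWTBotExt();

    // Ticks every enabled, currently unticked checkbox on screen.
    public void tickAllEnabled() {
        for (SWTBotCheckBox box : bot.checkBoxes()) {
            if (box.isEnabled() && !box.isChecked()) {
                box.click(); // click() toggles the checkbox state
            }
        }
    }
}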
| true | true | public SWTBotShell waitForShell(final String shellTitle, final int maxTimeout) {
if (shellTitle == null) {
throw new IllegalArgumentException("shellTitle cannot be null");
}
final int SLEEP_TIME = Timing.time2S();
final int ATTEMPTS_TIMEOUT = getAttemptsTimeout((maxTimeout < 0 ? 30 : maxTimeout), SLEEP_TIME);
for (int i = 0; i <= ATTEMPTS_TIMEOUT; i++) {
for (SWTBotShell shell : shells()) {
if (shellTitle.equals(shell.getText())) {
return shell;
}
}
if (i < ATTEMPTS_TIMEOUT) {
sleep(SLEEP_TIME);
}
}
return null;
}
| public SWTBotShell waitForShell(final String shellTitle, final int maxTimeout) {
if (shellTitle == null) {
throw new IllegalArgumentException("shellTitle cannot be null");
}
final int SLEEP_TIME = Timing.time2S();
final int ATTEMPTS_TIMEOUT = getAttemptsTimeout((maxTimeout < 0 ? 30 : maxTimeout), SLEEP_TIME);
for (int i = 0; i <= ATTEMPTS_TIMEOUT; i++) {
for (SWTBotShell shell : shells()) {
try {
if (shellTitle.equals(shell.getText())) {
return shell;
}
} catch (WidgetNotFoundException wnfe) {
log.info("Waiting for shell: " + wnfe.getMessage());
// do nothing more but continue in loop
}
}
if (i < ATTEMPTS_TIMEOUT) {
sleep(SLEEP_TIME);
}
}
return null;
}
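The patch above wraps shell.getText() in a try/catch because a shell can be disposed between the shells() enumeration and the getText() call, making getText() throw WidgetNotFoundException and aborting the whole wait. A minimal sketch of how the null-returning contract of waitForShell is typically consumed (the SWTBotExt class name and the JUnit fail() are assumptions for illustration):

import static org.junit.Assert.fail;

import org.eclipse.swtbot.swt.finder.widgets.SWTBotShell;

public class ShellWaitExample {

    // Assumed to be an instance of the extended bot class patched above.
    private final SWTBotExt bot = new SWTBotExt();

    public void closeAboutDialog() {
        // Polls for up to 10 seconds, sleeping between attempts, and now
        // survives shells being disposed while the loop inspects them.
        SWTBotShell about = bot.waitForShell("About", 10);
        if (about == null) {
            fail("'About' dialog did not appear within 10 seconds");
        }
        about.activate();
        about.close();
    }
}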
|
diff --git a/src/checkstyle/com/puppycrawl/tools/checkstyle/checks/EmptyStatementCheck.java b/src/checkstyle/com/puppycrawl/tools/checkstyle/checks/EmptyStatementCheck.java
index ffb0448a6..240b828b4 100644
--- a/src/checkstyle/com/puppycrawl/tools/checkstyle/checks/EmptyStatementCheck.java
+++ b/src/checkstyle/com/puppycrawl/tools/checkstyle/checks/EmptyStatementCheck.java
@@ -1,52 +1,52 @@
////////////////////////////////////////////////////////////////////////////////
// checkstyle: Checks Java source code for adherence to a set of rules.
// Copyright (C) 2001-2002 Oliver Burn
//
// This library is free software; you can redistribute it and/or
// modify it under the terms of the GNU Lesser General Public
// License as published by the Free Software Foundation; either
// version 2.1 of the License, or (at your option) any later version.
//
// This library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
// Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public
// License along with this library; if not, write to the Free Software
// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
////////////////////////////////////////////////////////////////////////////////
package com.puppycrawl.tools.checkstyle.checks;
import com.puppycrawl.tools.checkstyle.api.Check;
import com.puppycrawl.tools.checkstyle.api.TokenTypes;
import com.puppycrawl.tools.checkstyle.api.DetailAST;
/**
* <p>
* Check that finds empty statements.
* </p>
* <p>
* An example of how to configure the check is:
* </p>
* <pre>
* <module name="EmptyStatement"/>
* </pre>
* @author Rick Giles
* @version 1.0
*/
public class EmptyStatementCheck extends Check
{
/** @see com.puppycrawl.tools.checkstyle.api.Check */
public int[] getDefaultTokens()
{
- return new int[] { TokenTypes.EMPTY_STAT };
+ return new int[] {TokenTypes.EMPTY_STAT};
}
/** @see com.puppycrawl.tools.checkstyle.api.Check */
public void visitToken(DetailAST aAST)
{
log(aAST.getLineNo(), aAST.getColumnNo(), "empty.statement");
}
}
| true | true | public int[] getDefaultTokens()
{
return new int[] { TokenTypes.EMPTY_STAT };
}
| public int[] getDefaultTokens()
{
return new int[] {TokenTypes.EMPTY_STAT};
}
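For context on what this check does: it reports the EMPTY_STAT token, i.e. a lone semicolon that forms its own statement. A small self-contained example of code the check is meant to flag (illustrative, not taken from the checkstyle sources):

public class EmptyStatementExample {

    void demo(int x) {
        // The stray semicolon ends the 'if' immediately, so the block below
        // runs unconditionally; checkstyle reports the empty statement here.
        if (x > 0); {
            System.out.println("runs regardless of x");
        }

        // An empty loop body is reported the same way.
        for (int i = 0; i < 3; i++);
    }
}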
|
diff --git a/tests/src/org/fourdnest/androidclient/test/mapview/MapTest.java b/tests/src/org/fourdnest/androidclient/test/mapview/MapTest.java
index 573ed4e..b85e506 100644
--- a/tests/src/org/fourdnest/androidclient/test/mapview/MapTest.java
+++ b/tests/src/org/fourdnest/androidclient/test/mapview/MapTest.java
@@ -1,30 +1,30 @@
package org.fourdnest.androidclient.test.mapview;
import org.fourdnest.androidclient.tools.OsmStaticMapGetter;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import android.net.Uri;
import android.test.AndroidTestCase;
public class MapTest extends AndroidTestCase {
@Before
protected void setUp() throws Exception {
super.setUp();
}
@After
protected void tearDown() throws Exception {
super.tearDown();
}
@Test
public void testGetMediaFile() throws Exception {
- boolean val = (new OsmStaticMapGetter().getStaticMap(Uri.parse("/sdcard/test2.png"), null));
+ boolean val = (new OsmStaticMapGetter().getStaticMap(null));
assertTrue(val);
}
}
| true | true | public void testGetMediaFile() throws Exception {
boolean val = (new OsmStaticMapGetter().getStaticMap(Uri.parse("/sdcard/test2.png"), null));
assertTrue(val);
}
| public void testGetMediaFile() throws Exception {
boolean val = (new OsmStaticMapGetter().getStaticMap(null));
assertTrue(val);
}
|
diff --git a/src/main/java/de/beimax/spacealert/mission/MissionImpl.java b/src/main/java/de/beimax/spacealert/mission/MissionImpl.java
index 83f77bc..7f57986 100644
--- a/src/main/java/de/beimax/spacealert/mission/MissionImpl.java
+++ b/src/main/java/de/beimax/spacealert/mission/MissionImpl.java
@@ -1,788 +1,790 @@
/**
* This file is part of the JSpaceAlertMissionGenerator software.
* Copyright (C) 2011 Maximilian Kalus
* See http://www.beimax.de/ and https://github.com/mkalus/JSpaceAlertMissionGenerator
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
**/
package de.beimax.spacealert.mission;
import java.util.ArrayList;
import java.util.Random;
import java.util.logging.Level;
import java.util.logging.Logger;
import de.beimax.spacealert.util.Options;
import de.beimax.spacealert.mission.ThreatGroup;
/**
* Default Mission Generator
* @author mkalus
*/
public class MissionImpl implements Mission {
static private Logger logger = Logger.getLogger("MissionLogger");
static {
// debugging option set?
if (Options.getOptions().debug) logger.setLevel(Level.FINEST);
else logger.setLevel(Level.WARNING);
}
/**
* configuration: threat level (8 for std game)
*/
private int threatLevel = 8;
/**
* ...of which 1 level is unconfirmed (for 5 players)
*/
private int threatUnconfirmed = 1;
/**
* ...of which x levels are internal
*/
private int minInternalThreats = 1;
private int maxInternalThreats = 3;
private int maxInternalThreatsNumber = 2; // number of internal threats max
/**
* minimum and maximum time in which normal threats can occur
*/
private int minTNormalExternalThreat = 1;
private int maxTNormalExternalThreat = 8;
/**
* minimum and maximum time in which serious threats can occur
*/
private int minTSeriousExternalThreat = 2;
private int maxTSeriousExternalThreat = 7;
/**
* minimum and maximum time in which normal threats can occur
*/
private int minTNormalInternalThreat = 2;
private int maxTNormalInternalThreat = 7;
/**
* minimum and maximum time in which serious threats can occur
*/
private int minTSeriousInternalThreat = 3;
private int maxTSeriousInternalThreat = 6;
/**
* minimum data operations (either data transfer or incoming data)
*/
private int[] minDataOperations = {2, 2, 0};
private int[] maxDataOperations = {3, 3, 1};
/**
* minimum and maximum incoming data by phases
*/
private int[] minIncomingData = {1, 0, 0};
private int[] maxIncomingData = {3, 2, 0};
private int minIncomingDataTotal = 2;
/**
* minimum and maximum data transfers by phases
*/
private int[] minDataTransfer = {0, 1, 1};
private int[] maxDataTransfer = {1, 2, 1};
private int minDataTransferTotal = 3;
/**
* minimum and maximum time for white noise
*/
private int minWhiteNoise = 45;
private int maxWhiteNoise = 60;
private int minWhiteNoiseTime = 9;
private int maxWhiteNoiseTime = 20;
/**
* minimum and maximum time for phases
*/
private int[] minPhaseTime = {205, 180, 140};
private int[] maxPhaseTime = {240, 225, 155};
/**
* times for first threats to appear
*/
private int[] minTimeForFirst = { 10, 10 };
private int[] maxTimeForFirst = { 20, 40 };
/**
* chance for ambush in phases 4/8 in %
*/
private int[] chanceForAmbush = { 40, 40 };
/**
* "middle" threats (2+3/5+6) should appear with % of phase length
*/
private int threatsWithInPercent = 70;
/**
* keeps threats
*/
private ThreatGroup[] threats;
/**
* keeps incoming and data transfers
*/
private int[] incomingData;
private int[] dataTransfers;
/**
* white noise chunks in seconds (to distribute)
*/
private WhiteNoise[] whiteNoise;
/**
* phase times in seconds
*/
private int[] phaseTimes;
/**
* event list
*/
EventList eventList;
/**
* random number generator
*/
Random generator;
/**
* Constructor
*/
public MissionImpl() {
// get Options
Options options = Options.getOptions();
long seed;
if (options.seed == null) seed = System.nanoTime() + 8682522807148012L;
else seed = (long) options.seed;
// random number generator
generator = new Random(seed);
// copy variables from options
threatLevel = options.threatLevel;
threatUnconfirmed = options.threatUnconfirmed;
minInternalThreats = options.minInternalThreats;
maxInternalThreats = options.maxInternalThreats;
maxInternalThreatsNumber = options.maxInternalThreatsNumber;
minTNormalExternalThreat = options.minTNormalExternalThreat;
maxTNormalExternalThreat = options.maxTNormalExternalThreat;
minTSeriousExternalThreat = options.minTSeriousExternalThreat;
maxTSeriousExternalThreat = options.maxTSeriousExternalThreat;
minTNormalInternalThreat = options.minTNormalInternalThreat;
maxTNormalInternalThreat = options.maxTNormalInternalThreat;
minTSeriousInternalThreat = options.minTSeriousInternalThreat;
maxTSeriousInternalThreat = options.maxTSeriousInternalThreat;
minDataOperations = new int[]{ options.minDataOperations1, options.minDataOperations2, options.minDataOperations3 };
maxDataOperations = new int[]{ options.maxDataOperations1, options.maxDataOperations2, options.maxDataOperations3 };
minIncomingData = new int[]{ options.minIncomingData1, options.minIncomingData2, options.minIncomingData3 };
maxIncomingData = new int[]{ options.maxIncomingData1, options.maxIncomingData2, options.maxIncomingData3 };
minIncomingDataTotal = options.minIncomingDataTotal;
minDataTransfer = new int[]{ options.minDataTransfer1, options.minDataTransfer1, options.minDataTransfer3 };
maxDataTransfer = new int[]{ options.maxDataTransfer1, options.maxDataTransfer2, options.maxDataTransfer3 };
minDataTransferTotal = options.minDataTransferTotal;
minWhiteNoise = options.minWhiteNoise;
maxWhiteNoise = options.maxWhiteNoise;
minWhiteNoiseTime = options.minWhiteNoiseTime;
maxWhiteNoiseTime = options.maxWhiteNoiseTime;
minPhaseTime = new int[]{ options.minPhaseTime1, options.minPhaseTime2, options.minPhaseTime3 };
maxPhaseTime = new int[]{ options.maxPhaseTime1, options.maxPhaseTime2, options.maxPhaseTime3 };
minTimeForFirst = new int[]{ options.minTimeForFirst1, options.minTimeForFirst2 };
maxTimeForFirst = new int[]{ options.maxTimeForFirst1, options.maxTimeForFirst2 };
chanceForAmbush = new int[] { options.chanceForAmbush1, options.chanceForAmbush2 };
threatsWithInPercent = options.threatsWithInPercent;
// print out seed?
if (options.printSeed)
System.out.println("Random number generator seed: " + seed);
}
/**
* Return event list of mission
* @return ordered event list of mission
*/
public EventList getMissionEvents() {
return eventList;
}
/**
* Return length of a phase in seconds
* @param phase 1-3
* @return phase length of mission or -1
*/
public int getMissionPhaseLength(int phase) {
if (phase < 1 || phase > phaseTimes.length) return -1;
return phaseTimes[phase-1];
}
/**
* Generate new mission
*
* @return true if mission creation succeeded
*/
public boolean generateMission() {
// generate threats
boolean generated;
int tries = 100; //maximum number of tries to generate mission
do {generated = generateThreats();} while(!generated && tries-- > 0);
if (!generated) {
logger.warning("Giving up creating threats.");
return false; //fail
}
// generate data transfer and incoming data
generated = false; tries = 100;
do {generated = generateDataOperations();} while(!generated && tries-- > 0);
if (!generated) {
logger.warning("Giving up creating data operations.");
return false; //fail
}
//generate times
generateTimes();
//generate phases
generated = false; tries = 100;
do {generated = generatePhases();} while(!generated && tries-- > 0);
if (!generated) {
logger.warning("Giving up creating phase details.");
return false; //fail
}
return false;
}
/**
* "sane" generator method for threats
* @return true if generation was successful
*/
protected boolean generateThreats() {
// number of internal threats
int internalThreats = generator.nextInt(maxInternalThreats - minInternalThreats + 1) + minInternalThreats;
int externalThreats = threatLevel - internalThreats;
logger.fine("Threat Level: " + threatLevel + "; interal = " + internalThreats + ", external = " + externalThreats);
// generate number of serious threats
int seriousThreats = generator.nextInt(threatLevel / 2 + 1);
// if we only have serious threats and normal unconfirmed reports: reduce number of threats by 1
if (threatUnconfirmed % 2 == 1 && seriousThreats * 2 == threatLevel)
seriousThreats--;
int normalThreats = threatLevel - seriousThreats * 2;
logger.fine("Normal Threats: " + normalThreats + "; Serious Threats: " + seriousThreats);
// if there are 8 normal threats - check again, if we really want this
if (normalThreats == 8 && generator.nextInt(3) != 0) {
logger.info("8 normal threats unlikely. Redoing.");
return false;
}
// get sums
int threatsSum = normalThreats + seriousThreats;
// if threat level is higher than 8, create serious threats until we have a threat level of 8 or lower
// thanks to Leif Norcott from BoardGameGeek
while (threatsSum > 8) {
normalThreats -= 2;
seriousThreats++;
threatsSum = normalThreats + seriousThreats;
}
// distribute unconfirmed
int seriousUnconfirmed = generator.nextInt(threatUnconfirmed / 2 + 1);
int normalUnconfirmed = threatUnconfirmed - seriousUnconfirmed * 2;
if (normalUnconfirmed > normalThreats) { // adjust, if there are not enough threats
normalUnconfirmed -= 2;
seriousUnconfirmed++;
}
else if (seriousUnconfirmed > seriousThreats) { // adjust, if there are not enough serious threats
normalUnconfirmed += 2;
seriousUnconfirmed--;
}
logger.fine("Normal unconfirmed Threats: " + normalUnconfirmed + "; Serious unconfirmed Threats: " + seriousUnconfirmed);
// sane threat distribution onto phase 1 and 2
int threatsFirstPhase = threatsSum / 2 + generator.nextInt(3)-1;
int threatsSecondPhase = threatsSum - threatsFirstPhase;
if (threatsSecondPhase > threatsFirstPhase && threatsSecondPhase - threatsFirstPhase > 1) {
threatsSecondPhase--;
threatsFirstPhase++;
} else if (threatsSecondPhase < threatsFirstPhase && threatsFirstPhase - threatsSecondPhase > 1) {
threatsSecondPhase++;
threatsFirstPhase--;
}
logger.fine("Threats 1st phase: " + threatsFirstPhase + "; Threats 2nd phase: " + threatsSecondPhase);
// phases
ArrayList<Integer> phaseOne = new ArrayList<Integer>(4);
for (int i = 1; i <= 4; i++) phaseOne.add(new Integer(i));
ArrayList<Integer> phaseTwo = new ArrayList<Integer>(4);
for (int i = 5; i <= 8; i++) phaseTwo.add(new Integer(i));
// remove random entries from the phases
for (int i = 0; i < 4-threatsFirstPhase; i++) {
phaseOne.remove(generator.nextInt(phaseOne.size()));
}
for (int i = 0; i < 4-threatsSecondPhase; i++) {
phaseTwo.remove(generator.nextInt(phaseTwo.size()));
}
// free memory
ArrayList<Integer> phases = new ArrayList<Integer>(threatsFirstPhase + threatsSecondPhase);
ArrayList<Integer> internalphases = new ArrayList<Integer>(threatsFirstPhase + threatsSecondPhase);
for (int i = 0; i < threatsFirstPhase; i++) {
phases.add(phaseOne.get(i));
internalphases.add(phaseOne.get(i));
}
for (int i = 0; i < threatsSecondPhase; i++) {
phases.add(phaseTwo.get(i));
internalphases.add(phaseTwo.get(i));
}
phaseOne = null; phaseTwo = null;
// create threats by level
threats = new ThreatGroup[8];
for (int i = 0; i < 8; i++) {
threats[i] = new ThreatGroup();
}
// counter for maximum internal threats
int internalThreatsNumber = 0;
//statistics counter to make internal threats likely, too
int externalThreatLevelLeft = externalThreats;
for (int i = 0; i < threatsSum; i++) {
Threat newThreat = new Threat(); // new threat created
if (i < seriousThreats) {
newThreat.setThreatLevel(Threat.THREAT_LEVEL_SERIOUS);
// unconfirmed reports
if (seriousUnconfirmed > 0) {
seriousUnconfirmed--;
newThreat.setConfirmed(false);
} else newThreat.setConfirmed(true);
}
else {
newThreat.setThreatLevel(Threat.THREAT_LEVEL_NORMAL);
// unconfirmed reports
if (normalUnconfirmed > 0) {
normalUnconfirmed--;
newThreat.setConfirmed(false);
} else newThreat.setConfirmed(true);
}
// internal/external?
if (generator.nextInt(threatsSum - i) + 1 <= externalThreatLevelLeft) {
if (newThreat.getThreatLevel() == Threat.THREAT_LEVEL_SERIOUS) {
if (externalThreatLevelLeft == 1) { // not enough external threat level left => make internal
newThreat.setThreatPosition(Threat.THREAT_POSITION_INTERNAL);
internalThreatsNumber++;
} else { // serious threat level deduction
externalThreatLevelLeft -= 2;
newThreat.setThreatPosition(Threat.THREAT_POSITION_EXTERNAL);
}
} else { // normal threat level deduction
externalThreatLevelLeft--;
newThreat.setThreatPosition(Threat.THREAT_POSITION_EXTERNAL);
}
} else {
newThreat.setThreatPosition(Threat.THREAT_POSITION_INTERNAL);
internalThreatsNumber++;
}
if (internalThreatsNumber > maxInternalThreatsNumber) {
logger.info("Too many internal threats. Redoing.");
return false;
}
// define phase
int maxCounter = 3; // try three times before giving up
boolean found = false;
do {
int idx = generator.nextInt(phases.size());
int phase = phases.get(idx).intValue();
if (newThreat.getThreatPosition() == Threat.THREAT_POSITION_INTERNAL) {
idx = generator.nextInt(internalphases.size());
phase = internalphases.get(idx).intValue();
}
if (newThreat.getThreatLevel() == Threat.THREAT_LEVEL_SERIOUS) {
if (newThreat.getThreatPosition() == Threat.THREAT_POSITION_EXTERNAL) {
if (phase < minTSeriousExternalThreat || phase > maxTSeriousExternalThreat) continue;
} else {
if (phase < minTSeriousInternalThreat || phase > maxTSeriousInternalThreat) continue;
}
} else {
if (newThreat.getThreatPosition() == Threat.THREAT_POSITION_EXTERNAL) {
if (phase < minTNormalExternalThreat|| phase > maxTNormalExternalThreat) continue;
} else {
if (phase < minTNormalInternalThreat || phase > maxTNormalInternalThreat) continue;
}
}
found = true;
newThreat.setTime(phase);
if (newThreat.getThreatPosition() == Threat.THREAT_POSITION_INTERNAL) {
if (idx < internalphases.size() - 1) {
internalphases.remove(idx+1);
}
internalphases.remove(idx);
} else {
phases.remove(idx);
}
} while(!found && maxCounter-- > 0);
if (!found) {
logger.info("Could not create mission due to phase restrictions. Redoing.");
return false;
}
//System.out.println(newThreat);
if (newThreat.getThreatPosition() == Threat.THREAT_POSITION_INTERNAL) {
threats[newThreat.getTime() - 1].addInternal(newThreat);
} else {
threats[newThreat.getTime() - 1].addExternal(newThreat);
}
} // for (int i = 0; i < threatsSum; i++) {
// TODO: check if there are two internal threats in a row - if there are, redo mission
// now sort mission entries and generate attack sectors
int lastSector = -1;
for (int i = 0; i < 8; i++) {
Threat x = threats[i].getExternal();
if (x != null) {
switch(generator.nextInt(3)) {
case 0: if (lastSector != Threat.THREAT_SECTOR_BLUE) x.setSector(Threat.THREAT_SECTOR_BLUE);
else x.setSector(Threat.THREAT_SECTOR_WHITE); break;
case 1: if (lastSector != Threat.THREAT_SECTOR_WHITE) x.setSector(Threat.THREAT_SECTOR_WHITE);
else x.setSector(Threat.THREAT_SECTOR_RED); break;
case 2: if (lastSector != Threat.THREAT_SECTOR_RED) x.setSector(Threat.THREAT_SECTOR_RED);
else x.setSector(Threat.THREAT_SECTOR_BLUE); break;
default: System.out.println("No Way!");
}
threats[i].addExternal(x);
lastSector = x.getSector();
}
//if (threats[i] != null) System.out.println(threats[i]);
}
// for (int i = 0; i < 8; i++) {
// System.out.println(i);
// System.out.println(threats[i].getInternal());
// System.out.println(threats[i].getExternal());
// }
return true;
}
/**
* Generate data operations (either data transfer or incoming data)
* @return true, if data operations could be generated
*/
protected boolean generateDataOperations() {
// clear data
incomingData = new int[3];
dataTransfers = new int[3];
int incomingSum = 0;
int transferSum = 0;
// generate stuff by phase
for (int i = 0; i < 3; i++) {
incomingData[i] = generator.nextInt(maxIncomingData[i] - minIncomingData[i] + 1) + minIncomingData[i];
dataTransfers[i] = generator.nextInt(maxDataTransfer[i] - minDataTransfer[i] + 1) + minDataTransfer[i];
// check minimums
if (incomingData[i] + dataTransfers[i] < minDataOperations[i] ||
incomingData[i] + dataTransfers[i] > maxDataOperations[i]) return false;
incomingSum += incomingData[i];
transferSum += dataTransfers[i];
}
// check minimums
if (incomingSum < minIncomingDataTotal || transferSum < minDataTransferTotal) return false;
// debugging information
if (logger.getLevel() == Level.FINE) {
for (int i = 0; i < 3; i++) {
logger.fine("Phase " + (i+1) + ": Incoming Data = " + incomingData[i] + "; Data Transfers = " + dataTransfers[i]);
}
}
return true;
}
/**
* simple generation of times for phases, white noise etc.
*/
protected void generateTimes() {
// generate white noise
int whiteNoiseTime = generator.nextInt(maxWhiteNoise - minWhiteNoise + 1) + minWhiteNoise;
logger.fine("White noise time: " + whiteNoiseTime);
// create chunks
ArrayList<Integer> whiteNoiseChunks = new ArrayList<Integer>();
while (whiteNoiseTime > 0) {
// create random chunk
int chunk = generator.nextInt(maxWhiteNoiseTime - minWhiteNoiseTime + 1) + minWhiteNoiseTime;
// check if there is enough time left
if (chunk > whiteNoiseTime) {
// hard case: smaller than minimum time
if (chunk < minWhiteNoiseTime) {
// add to last chunk that fits
for (int i = whiteNoiseChunks.size()-1; i >= 0; i--) {
int sumChunk = whiteNoiseChunks.get(i) + chunk;
// if smaller than maximum time: add to this chunk
if (sumChunk <= maxWhiteNoiseTime) {
whiteNoiseChunks.set(i, sumChunk);
whiteNoiseTime = 0;
break;
}
}
// still not zeroed
if (whiteNoiseTime > 0) { // add to last element, regardless - quite unlikely though
int lastIdx = whiteNoiseChunks.size()-1;
whiteNoiseChunks.set(lastIdx, whiteNoiseChunks.get(lastIdx) + chunk);
whiteNoiseTime = 0;
}
} else { // easy case: create smaller rest chunk
whiteNoiseChunks.add(whiteNoiseTime);
whiteNoiseTime = 0;
}
} else { // add new chunk
whiteNoiseChunks.add(chunk);
whiteNoiseTime -= chunk;
}
}
// ok, add chunks to mission
whiteNoise = new WhiteNoise[whiteNoiseChunks.size()];
for (int i = 0; i < whiteNoiseChunks.size(); i++) whiteNoise[i] = new WhiteNoise(whiteNoiseChunks.get(i));
// add mission lengths
phaseTimes = new int[3];
for (int i = 0; i < 3; i++) {
phaseTimes[i] = generator.nextInt(maxPhaseTime[i] - minPhaseTime[i] + 1) + minPhaseTime[i];
}
}
/**
* generate phase stuff from data above
* @return true if phase generation succeeded
*/
protected boolean generatePhases() {
logger.info("Data gathered: Generating phases.");
// create events
eventList = new EventList();
// add fixed events: announcements
eventList.addPhaseEvents(phaseTimes[0], phaseTimes[1], phaseTimes[2]);
boolean ambushOccured = false;
// add threats in first phase
// ambush handling - is there a phase 4 threat, is it a normal external threat, and is the chance taken?
Threat maybeAmbush = threats[3].getExternal();
if (maybeAmbush != null && maybeAmbush.getThreatLevel() == Threat.THREAT_LEVEL_NORMAL && generator.nextInt(100) + 1 < chanceForAmbush[0]) {
//...then add an "ambush" threat between 1 minute and 20 secs warnings
boolean done = false; // try until it fits
do {
// TODO: remove hardcoded length here:
int ambushTime = generator.nextInt(35) + phaseTimes[0] - 59;
logger.info("Ambush in phase 1 at time: " + ambushTime);
done = eventList.addEvent(ambushTime, maybeAmbush);
} while (!done);
threats[3].removeExternal();
ambushOccured = true; // to disallow two ambushes in one game
}
// to be used further down
int[] lastThreatTime = { 0, 0 };
// add the rest of the threats
int currentTime = generator.nextInt(maxTimeForFirst[0] - minTimeForFirst[0] + 1) + minTimeForFirst[0];
// threats should appear within this time
int lastTime = (int) (phaseTimes[0] * (((float)threatsWithInPercent) / 100));
boolean first = true;
// look for first threat
for (int i = 0; i <= 3; i++) {
ThreatGroup now = threats[i];
Threat activeThreat;
if (now.hasExternal()) {
activeThreat = now.removeExternal();
i--;
} else if (now.hasInternal()) {
activeThreat = now.removeInternal();
i--;
} else {
continue;
}
// first event?
if (first) {
if (!eventList.addEvent(currentTime, activeThreat)) logger.warning("Could not add first event to list (time " + currentTime + ") - arg!");
first = false;
} else {
boolean done = false; // try until it fits
int nextTime = 0;
int tries = 0; // number of tries
do {
// next threat appears
// next element occurs
int divisor = 2;
if (++tries > 10) divisor = 3;
else if (tries > 20) divisor = 4;
+ if (lastTime < currentTime) return false;
nextTime = generator.nextInt((lastTime - currentTime) / divisor) + 5;
if (tries > 30) return false;
done = eventList.addEvent(currentTime + nextTime, activeThreat);
} while (!done);
currentTime += nextTime;
// save lastThreatTime for data transfers further down
if (i < 3) lastThreatTime[0] = currentTime;
}
// add to time
currentTime += activeThreat.getLengthInSeconds();
}
// add threats in second phase
// ambush handling - is there a phase 8 threat, is it a normal external threat, and is the chance taken?
maybeAmbush = threats[7].getExternal();
if (!ambushOccured && maybeAmbush != null && maybeAmbush.getThreatLevel() == Threat.THREAT_LEVEL_NORMAL && generator.nextInt(100) + 1 < chanceForAmbush[1]) {
//...then add an "ambush" threat between 1 minute and 20 secs warnings
boolean done = false; // try until it fits
do {
// TODO: remove hardcoded length here:
int ambushTime = generator.nextInt(35) + phaseTimes[0] + phaseTimes[1] - 59;
logger.info("Ambush in phase 2 at time: " + ambushTime);
done = eventList.addEvent(ambushTime, maybeAmbush);
} while (!done);
threats[7].removeExternal();
}
// add the rest of the threats
currentTime = phaseTimes[0] + generator.nextInt(maxTimeForFirst[1] - minTimeForFirst[1] + 1) + minTimeForFirst[1];
// threats should appear within this time
lastTime = phaseTimes[0] + (int) (phaseTimes[1] * (((float)threatsWithInPercent) / 100));
first = true;
// look for first threat
for (int i = 4; i <= 7; i++) {
ThreatGroup now = threats[i];
Threat activeThreat;
if (now.hasExternal()) {
activeThreat = now.removeExternal();
i--;
} else if (now.hasInternal()) {
activeThreat = now.removeInternal();
i--;
} else {
continue;
}
// first event?
if (first) {
if (!eventList.addEvent(currentTime, activeThreat)) logger.warning("Could not add first event to list in second phase (time " + currentTime + ") - arg!");
first = false;
} else {
boolean done = false; // try until it fits
int nextTime = 0;
int tries = 0; // number of tries
do {
// next element occurs
int divisor = 2;
if (++tries > 10) divisor = 3;
else if (tries > 20) divisor = 4;
+ if (lastTime < currentTime) return false;
nextTime = generator.nextInt((lastTime - currentTime) / divisor) + 5;
if (tries > 30) return false;
done = eventList.addEvent(currentTime + nextTime, activeThreat);
} while (!done);
currentTime += nextTime;
// save lastThreatTime for data transfers further down
if (i < 7) lastThreatTime[1] = currentTime;
}
// add to time
currentTime += activeThreat.getLengthInSeconds();
}
//add data transfers
// get start and end times
int startTime = 0;
int endTime = 0;
// special balance: first data transfers in phase 1 and 2 should occur shortly after first threat wave
for (int i = 0; i < 2; i++) {
startTime = endTime;
endTime += phaseTimes[i];
if (dataTransfers[i] > 0) { // if there is a data transfer
startTime = lastThreatTime[i];
boolean done = false; // try until it fits
do { // try to add incoming data within 30 seconds of event
startTime = generator.nextInt(31) + startTime + 1;
done = eventList.addEvent(startTime, new DataTransfer());
} while (!done && startTime < endTime);
if (done) {
// reduce data transfers below
dataTransfers[i]--;
}
}
}
startTime = 0;
endTime = 0;
// distribute rest of data transfers and incoming data randomly within the phases
for (int i = 0; i < 3; i++) {
// recalculate phase times
startTime = endTime;
endTime += phaseTimes[i];
// data transfer first, since these are fairly long
- for (int j = 0; j < dataTransfers[j]; j++) {
+ for (int j = 0; j < dataTransfers[i]; j++) {
boolean done = false; // try until it fits
do {
// a data transfer can occur pretty much anywhere within the phase
int time = generator.nextInt(endTime - startTime) + startTime - 5; // to fend off events after mission ends
done = eventList.addEvent(time, new DataTransfer());
} while (!done);
}
// incoming data second
for (int j = 0; j < incomingData[j]; j++) {
boolean done = false; // try until it fits
do {
// incoming data can occur pretty much anywhere within the phase
int time = generator.nextInt(endTime - startTime) + startTime - 5; // to fend off events after mission ends
done = eventList.addEvent(time, new IncomingData());
} while (!done);
}
}
//add white noise at random times
for (int i = 0; i < whiteNoise.length; i++) {
boolean done = false; // try until it fits
do {
// white noise can pretty much occur everywhere
int time = generator.nextInt(phaseTimes[0] + phaseTimes[1] + phaseTimes[2] - 30) + 10;
done = eventList.addEvent(time, whiteNoise[i]);
} while (!done);
}
return true;
}
/**
* Prints list of missions
*/
@Override
public String toString() {
return eventList.toString();
}
}
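The two added guards (+ if (lastTime < currentTime) return false;) exist because java.util.Random.nextInt(int bound) throws IllegalArgumentException for a non-positive bound: once currentTime drifts past lastTime, (lastTime - currentTime) / divisor goes negative and the old code crashed instead of returning false and letting generateMission() retry. A standalone sketch of that contract (illustrative, independent of the mission generator):

import java.util.Random;

public class NextIntBoundDemo {

    public static void main(String[] args) {
        Random generator = new Random();

        int lastTime = 100;
        int currentTime = 120; // drifted past lastTime, as in the buggy path
        int divisor = 2;

        int bound = (lastTime - currentTime) / divisor; // -10, non-positive

        // Without a guard, generator.nextInt(bound) would throw
        // IllegalArgumentException ("bound must be positive").
        if (bound <= 0) {
            System.out.println("non-positive bound: bail out so the caller can retry");
            return;
        }
        int nextTime = generator.nextInt(bound) + 5;
        System.out.println("next event in " + nextTime + " s");
    }
}

Note that an equal lastTime and currentTime (or a small positive difference that integer division rounds to zero) also yields a non-positive bound, which is why this sketch guards bound <= 0 rather than only lastTime < currentTime.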
| false | true | protected boolean generateThreats() {
// number of internal threats
int internalThreats = generator.nextInt(maxInternalThreats - minInternalThreats + 1) + minInternalThreats;
int externalThreats = threatLevel - internalThreats;
logger.fine("Threat Level: " + threatLevel + "; interal = " + internalThreats + ", external = " + externalThreats);
// generate number of serious threats
int seriousThreats = generator.nextInt(threatLevel / 2 + 1);
// if we only have serious threats and normal unconfirmed reports: reduce number of threats by 1
if (threatUnconfirmed % 2 == 1 && seriousThreats * 2 == threatLevel)
seriousThreats--;
int normalThreats = threatLevel - seriousThreats * 2;
logger.fine("Normal Threats: " + normalThreats + "; Serious Threats: " + seriousThreats);
// if there are 8 normal threats - check again, if we really want this
if (normalThreats == 8 && generator.nextInt(3) != 0) {
logger.info("8 normal threats unlikely. Redoing.");
return false;
}
// get sums
int threatsSum = normalThreats + seriousThreats;
// if threat level is higher than 8, create serious threats until we have a threat level of 8 or lower
// thanks to Leif Norcott from BoardGameGeek
while (threatsSum > 8) {
normalThreats -= 2;
seriousThreats++;
threatsSum = normalThreats + seriousThreats;
}
// distribute unconfirmed
int seriousUnconfirmed = generator.nextInt(threatUnconfirmed / 2 + 1);
int normalUnconfirmed = threatUnconfirmed - seriousUnconfirmed * 2;
if (normalUnconfirmed > normalThreats) { // adjust, if there are not enough threats
normalUnconfirmed -= 2;
seriousUnconfirmed++;
}
else if (seriousUnconfirmed > seriousThreats) { // adjust, if there are not enough serious threats
normalUnconfirmed += 2;
seriousUnconfirmed--;
}
logger.fine("Normal unconfirmed Threats: " + normalUnconfirmed + "; Serious unconfirmed Threats: " + seriousUnconfirmed);
// sane threat distribution onto phase 1 and 2
int threatsFirstPhase = threatsSum / 2 + generator.nextInt(3)-1;
int threatsSecondPhase = threatsSum - threatsFirstPhase;
if (threatsSecondPhase > threatsFirstPhase && threatsSecondPhase - threatsFirstPhase > 1) {
threatsSecondPhase--;
threatsFirstPhase++;
} else if (threatsSecondPhase < threatsFirstPhase && threatsFirstPhase - threatsSecondPhase > 1) {
threatsSecondPhase++;
threatsFirstPhase--;
}
logger.fine("Threats 1st phase: " + threatsFirstPhase + "; Threats 2nd phase: " + threatsSecondPhase);
// phases
ArrayList<Integer> phaseOne = new ArrayList<Integer>(4);
for (int i = 1; i <= 4; i++) phaseOne.add(new Integer(i));
ArrayList<Integer> phaseTwo = new ArrayList<Integer>(4);
for (int i = 5; i <= 8; i++) phaseTwo.add(new Integer(i));
// remove random entries from the phases
for (int i = 0; i < 4-threatsFirstPhase; i++) {
phaseOne.remove(generator.nextInt(phaseOne.size()));
}
for (int i = 0; i < 4-threatsSecondPhase; i++) {
phaseTwo.remove(generator.nextInt(phaseTwo.size()));
}
// free memory
ArrayList<Integer> phases = new ArrayList<Integer>(threatsFirstPhase + threatsSecondPhase);
ArrayList<Integer> internalphases = new ArrayList<Integer>(threatsFirstPhase + threatsSecondPhase);
for (int i = 0; i < threatsFirstPhase; i++) {
phases.add(phaseOne.get(i));
internalphases.add(phaseOne.get(i));
}
for (int i = 0; i < threatsSecondPhase; i++) {
phases.add(phaseTwo.get(i));
internalphases.add(phaseTwo.get(i));
}
phaseOne = null; phaseTwo = null;
// create threats by level
threats = new ThreatGroup[8];
for (int i = 0; i < 8; i++) {
threats[i] = new ThreatGroup();
}
// counter for maximum internal threats
int internalThreatsNumber = 0;
//statistics counter to make internal threats likely, too
int externalThreatLevelLeft = externalThreats;
for (int i = 0; i < threatsSum; i++) {
Threat newThreat = new Threat(); // new threat created
if (i < seriousThreats) {
newThreat.setThreatLevel(Threat.THREAT_LEVEL_SERIOUS);
// unconfirmed reports
if (seriousUnconfirmed > 0) {
seriousUnconfirmed--;
newThreat.setConfirmed(false);
} else newThreat.setConfirmed(true);
}
else {
newThreat.setThreatLevel(Threat.THREAT_LEVEL_NORMAL);
// unconfirmed reports
if (normalUnconfirmed > 0) {
normalUnconfirmed--;
newThreat.setConfirmed(false);
} else newThreat.setConfirmed(true);
}
// internal/external?
if (generator.nextInt(threatsSum - i) + 1 <= externalThreatLevelLeft) {
if (newThreat.getThreatLevel() == Threat.THREAT_LEVEL_SERIOUS) {
if (externalThreatLevelLeft == 1) { // not enough external threat level left => make internal
newThreat.setThreatPosition(Threat.THREAT_POSITION_INTERNAL);
internalThreatsNumber++;
} else { // serious threat level deduction
externalThreatLevelLeft -= 2;
newThreat.setThreatPosition(Threat.THREAT_POSITION_EXTERNAL);
}
} else { // normal threat level deduction
externalThreatLevelLeft--;
newThreat.setThreatPosition(Threat.THREAT_POSITION_EXTERNAL);
}
} else {
newThreat.setThreatPosition(Threat.THREAT_POSITION_INTERNAL);
internalThreatsNumber++;
}
if (internalThreatsNumber > maxInternalThreatsNumber) {
logger.info("Too many internal threats. Redoing.");
return false;
}
// define phase
int maxCounter = 3; // try three times before giving up
boolean found = false;
do {
int idx = generator.nextInt(phases.size());
int phase = phases.get(idx).intValue();
if (newThreat.getThreatPosition() == Threat.THREAT_POSITION_INTERNAL) {
idx = generator.nextInt(internalphases.size());
phase = internalphases.get(idx).intValue();
}
if (newThreat.getThreatLevel() == Threat.THREAT_LEVEL_SERIOUS) {
if (newThreat.getThreatPosition() == Threat.THREAT_POSITION_EXTERNAL) {
if (phase < minTSeriousExternalThreat || phase > maxTSeriousExternalThreat) continue;
} else {
if (phase < minTSeriousInternalThreat || phase > maxTSeriousInternalThreat) continue;
}
} else {
if (newThreat.getThreatPosition() == Threat.THREAT_POSITION_EXTERNAL) {
if (phase < minTNormalExternalThreat|| phase > maxTNormalExternalThreat) continue;
} else {
if (phase < minTNormalInternalThreat || phase > maxTNormalInternalThreat) continue;
}
}
found = true;
newThreat.setTime(phase);
if (newThreat.getThreatPosition() == Threat.THREAT_POSITION_INTERNAL) {
if (idx < internalphases.size() - 1) {
internalphases.remove(idx+1);
}
internalphases.remove(idx);
} else {
phases.remove(idx);
}
} while(!found && maxCounter-- > 0);
if (!found) {
logger.info("Could not create mission due to phase restrictions. Redoing.");
return false;
}
//System.out.println(newThreat);
if (newThreat.getThreatPosition() == Threat.THREAT_POSITION_INTERNAL) {
threats[newThreat.getTime() - 1].addInternal(newThreat);
} else {
threats[newThreat.getTime() - 1].addExternal(newThreat);
}
} // for (int i = 0; i < threatsSum; i++) {
// TODO: check if there are two internal threats in a row - if there are, redo mission
// now sort mission entries and generate attack sectors
int lastSector = -1;
for (int i = 0; i < 8; i++) {
Threat x = threats[i].getExternal();
if (x != null) {
switch(generator.nextInt(3)) {
case 0: if (lastSector != Threat.THREAT_SECTOR_BLUE) x.setSector(Threat.THREAT_SECTOR_BLUE);
else x.setSector(Threat.THREAT_SECTOR_WHITE); break;
case 1: if (lastSector != Threat.THREAT_SECTOR_WHITE) x.setSector(Threat.THREAT_SECTOR_WHITE);
else x.setSector(Threat.THREAT_SECTOR_RED); break;
case 2: if (lastSector != Threat.THREAT_SECTOR_RED) x.setSector(Threat.THREAT_SECTOR_RED);
else x.setSector(Threat.THREAT_SECTOR_BLUE); break;
default: System.out.println("No Way!");
}
threats[i].addExternal(x);
lastSector = x.getSector();
}
//if (threats[i] != null) System.out.println(threats[i]);
}
// for (int i = 0; i < 8; i++) {
// System.out.println(i);
// System.out.println(threats[i].getInternal());
// System.out.println(threats[i].getExternal());
// }
return true;
}
/**
* Generate data operations (either data transfer or incoming data)
* @return true, if data operations could be generated
*/
protected boolean generateDataOperations() {
// clear data
incomingData = new int[3];
dataTransfers = new int[3];
int incomingSum = 0;
int transferSum = 0;
// generate stuff by phase
for (int i = 0; i < 3; i++) {
incomingData[i] = generator.nextInt(maxIncomingData[i] - minIncomingData[i] + 1) + minIncomingData[i];
dataTransfers[i] = generator.nextInt(maxDataTransfer[i] - minDataTransfer[i] + 1) + minDataTransfer[i];
// check minimums
if (incomingData[i] + dataTransfers[i] < minDataOperations[i] ||
incomingData[i] + dataTransfers[i] > maxDataOperations[i]) return false;
incomingSum += incomingData[i];
transferSum += dataTransfers[i];
}
// check minimums
if (incomingSum < minIncomingDataTotal || transferSum < minDataTransferTotal) return false;
// debugging information
if (logger.getLevel() == Level.FINE) {
for (int i = 0; i < 3; i++) {
logger.fine("Phase " + (i+1) + ": Incoming Data = " + incomingData[i] + "; Data Transfers = " + dataTransfers[i]);
}
}
return true;
}
/**
* simple generation of times for phases, white noise etc.
*/
protected void generateTimes() {
// generate white noise
int whiteNoiseTime = generator.nextInt(maxWhiteNoise - minWhiteNoise + 1) + minWhiteNoise;
logger.fine("White noise time: " + whiteNoiseTime);
// create chunks
ArrayList<Integer> whiteNoiseChunks = new ArrayList<Integer>();
while (whiteNoiseTime > 0) {
// create random chunk
int chunk = generator.nextInt(maxWhiteNoiseTime - minWhiteNoiseTime + 1) + minWhiteNoiseTime;
// check if there is enough time left
if (chunk > whiteNoiseTime) {
// hard case: smaller than minimum time
if (chunk < minWhiteNoiseTime) {
// add to last chunk that fits
for (int i = whiteNoiseChunks.size()-1; i >= 0; i--) {
int sumChunk = whiteNoiseChunks.get(i) + chunk;
// if smaller than maximum time: add to this chunk
if (sumChunk <= maxWhiteNoiseTime) {
whiteNoiseChunks.set(i, sumChunk);
whiteNoiseTime = 0;
break;
}
}
// still not zeroed
if (whiteNoiseTime > 0) { // add to last element, regardless - quite unlikely though
int lastIdx = whiteNoiseChunks.size()-1;
whiteNoiseChunks.set(lastIdx, whiteNoiseChunks.get(lastIdx) + chunk);
whiteNoiseTime = 0;
}
} else { // easy case: create smaller rest chunk
whiteNoiseChunks.add(whiteNoiseTime);
whiteNoiseTime = 0;
}
} else { // add new chunk
whiteNoiseChunks.add(chunk);
whiteNoiseTime -= chunk;
}
}
// ok, add chunks to mission
whiteNoise = new WhiteNoise[whiteNoiseChunks.size()];
for (int i = 0; i < whiteNoiseChunks.size(); i++) whiteNoise[i] = new WhiteNoise(whiteNoiseChunks.get(i));
// add mission lengths
phaseTimes = new int[3];
for (int i = 0; i < 3; i++) {
phaseTimes[i] = generator.nextInt(maxPhaseTime[i] - minPhaseTime[i] + 1) + minPhaseTime[i];
}
}
/**
* generate phase stuff from data above
* @return true if phase generation succeeded
*/
protected boolean generatePhases() {
logger.info("Data gathered: Generating phases.");
// create events
eventList = new EventList();
// add fixed events: announcements
eventList.addPhaseEvents(phaseTimes[0], phaseTimes[1], phaseTimes[2]);
boolean ambushOccured = false;
// add threats in first phase
// ambush handling - is there a phase 4 threat, is it a normal external threat, and is the chance taken?
Threat maybeAmbush = threats[3].getExternal();
if (maybeAmbush != null && maybeAmbush.getThreatLevel() == Threat.THREAT_LEVEL_NORMAL && generator.nextInt(100) + 1 < chanceForAmbush[0]) {
//...then add an "ambush" threat between 1 minute and 20 secs warnings
boolean done = false; // try until it fits
do {
// TODO: remove hardcoded length here:
int ambushTime = generator.nextInt(35) + phaseTimes[0] - 59;
logger.info("Ambush in phase 1 at time: " + ambushTime);
done = eventList.addEvent(ambushTime, maybeAmbush);
} while (!done);
threats[3].removeExternal();
ambushOccured = true; // to disallow two ambushes in one game
}
// to be used further down
int[] lastThreatTime = { 0, 0 };
// add the rest of the threats
int currentTime = generator.nextInt(maxTimeForFirst[0] - minTimeForFirst[0] + 1) + minTimeForFirst[0];
// threats should appear within this time
int lastTime = (int) (phaseTimes[0] * (((float)threatsWithInPercent) / 100));
boolean first = true;
// look for first threat
for (int i = 0; i <= 3; i++) {
ThreatGroup now = threats[i];
Threat activeThreat;
if (now.hasExternal()) {
activeThreat = now.removeExternal();
i--;
} else if (now.hasInternal()) {
activeThreat = now.removeInternal();
i--;
} else {
continue;
}
// first event?
if (first) {
if (!eventList.addEvent(currentTime, activeThreat)) logger.warning("Could not add first event to list (time " + currentTime + ") - arg!");
first = false;
} else {
boolean done = false; // try until it fits
int nextTime = 0;
int tries = 0; // number of tries
do {
// next threat appears
// next element occurs
int divisor = 2;
if (++tries > 10) divisor = 3;
else if (tries > 20) divisor = 4;
nextTime = generator.nextInt((lastTime - currentTime) / divisor) + 5;
if (tries > 30) return false;
done = eventList.addEvent(currentTime + nextTime, activeThreat);
} while (!done);
currentTime += nextTime;
// save lastThreatTime for data transfers further down
if (i < 3) lastThreatTime[0] = currentTime;
}
// add to time
currentTime += activeThreat.getLengthInSeconds();
}
// add threats in second phase
// ambush handling - is there a phase 8 threat, is it a normal external threat, and is the chance taken?
maybeAmbush = threats[7].getExternal();
if (!ambushOccured && maybeAmbush != null && maybeAmbush.getThreatLevel() == Threat.THREAT_LEVEL_NORMAL && generator.nextInt(100) + 1 < chanceForAmbush[1]) {
//...then add an "ambush" threat between 1 minute and 20 secs warnings
boolean done = false; // try until it fits
do {
// TODO: remove hardcoded length here:
int ambushTime = generator.nextInt(35) + phaseTimes[0] + phaseTimes[1] - 59;
logger.info("Ambush in phase 2 at time: " + ambushTime);
done = eventList.addEvent(ambushTime, maybeAmbush);
} while (!done);
threats[7].removeExternal();
}
// add the rest of the threats
currentTime = phaseTimes[0] + generator.nextInt(maxTimeForFirst[1] - minTimeForFirst[1] + 1) + minTimeForFirst[1];
// threats should appear within this time
lastTime = phaseTimes[0] + (int) (phaseTimes[1] * (((float)threatsWithInPercent) / 100));
first = true;
// look for first threat
for (int i = 4; i <= 7; i++) {
ThreatGroup now = threats[i];
Threat activeThreat;
if (now.hasExternal()) {
activeThreat = now.removeExternal();
i--;
} else if (now.hasInternal()) {
activeThreat = now.removeInternal();
i--;
} else {
continue;
}
// first event?
if (first) {
if (!eventList.addEvent(currentTime, activeThreat)) logger.warning("Could not add first event to list in second phase (time " + currentTime + ") - arg!");
first = false;
} else {
boolean done = false; // try until it fits
int nextTime = 0;
int tries = 0; // number of tries
do {
// next element occurs
int divisor = 2;
if (++tries > 10) divisor = 3;
else if (tries > 20) divisor = 4;
nextTime = generator.nextInt((lastTime - currentTime) / divisor) + 5;
if (tries > 30) return false;
done = eventList.addEvent(currentTime + nextTime, activeThreat);
} while (!done);
currentTime += nextTime;
// save lastThreatTime for data transfers further down
if (i < 7) lastThreatTime[1] = currentTime;
}
// add to time
currentTime += activeThreat.getLengthInSeconds();
}
//add data transfers
// get start and end times
int startTime = 0;
int endTime = 0;
// special balance: first data transfers in phase 1 and 2 should occur shortly after first threat wave
for (int i = 0; i < 2; i++) {
startTime = endTime;
endTime += phaseTimes[i];
if (dataTransfers[i] > 0) { // if there is a data transfer
startTime = lastThreatTime[i];
boolean done = false; // try until it fits
do { // try to add incoming data within 30 seconds of event
startTime = generator.nextInt(31) + startTime + 1;
done = eventList.addEvent(startTime, new DataTransfer());
} while (!done && startTime < endTime);
if (done) {
// reduce data transfers below
dataTransfers[i]--;
}
}
}
startTime = 0;
endTime = 0;
// distribute rest of data transfers and incoming data randomly within the phases
for (int i = 0; i < 3; i++) {
// recalculate phase times
startTime = endTime;
endTime += phaseTimes[i];
// data transfer first, since these are fairly long
for (int j = 0; j < dataTransfers[j]; j++) {
boolean done = false; // try until it fits
do {
// a data transfer can occur pretty much anywhere within the phase
int time = generator.nextInt(endTime - startTime) + startTime - 5; // to fend off events after mission ends
done = eventList.addEvent(time, new DataTransfer());
} while (!done);
}
// incoming data second
for (int j = 0; j < incomingData[j]; j++) {
boolean done = false; // try until it fits
do {
// incoming data can occur pretty much anywhere within the phase
int time = generator.nextInt(endTime - startTime) + startTime - 5; // to fend off events after mission ends
done = eventList.addEvent(time, new IncomingData());
} while (!done);
}
}
//add white noise at random times
for (int i = 0; i < whiteNoise.length; i++) {
boolean done = false; // try until it fits
do {
// white noise can pretty much occur everywhere
int time = generator.nextInt(phaseTimes[0] + phaseTimes[1] + phaseTimes[2] - 30) + 10;
done = eventList.addEvent(time, whiteNoise[i]);
} while (!done);
}
return true;
}
/**
* Prints list of missions
*/
@Override
public String toString() {
return eventList.toString();
}
}
| protected boolean generateThreats() {
// number of internal threats
int internalThreats = generator.nextInt(maxInternalThreats - minInternalThreats + 1) + minInternalThreats;
int externalThreats = threatLevel - internalThreats;
logger.fine("Threat Level: " + threatLevel + "; interal = " + internalThreats + ", external = " + externalThreats);
// generate number of serious threats
int seriousThreats = generator.nextInt(threatLevel / 2 + 1);
// if we only have serious threats and normal unconfirmed reports: reduce number of threats by 1
if (threatUnconfirmed % 2 == 1 && seriousThreats * 2 == threatLevel)
seriousThreats--;
int normalThreats = threatLevel - seriousThreats * 2;
logger.fine("Normal Threats: " + normalThreats + "; Serious Threats: " + seriousThreats);
// if there are 8 normal threats - check again, if we really want this
if (normalThreats == 8 && generator.nextInt(3) != 0) {
logger.info("8 normal threats unlikely. Redoing.");
return false;
}
// get sums
int threatsSum = normalThreats + seriousThreats;
// if threat level is higher than 8, create serious threats until we have a threat level of 8 or lower
// thanks to Leif Norcott from BoardGameGeek
while (threatsSum > 8) {
normalThreats -= 2;
seriousThreats++;
threatsSum = normalThreats + seriousThreats;
}
// distribute unconfirmed
int seriousUnconfirmed = generator.nextInt(threatUnconfirmed / 2 + 1);
int normalUnconfirmed = threatUnconfirmed - seriousUnconfirmed * 2;
if (normalUnconfirmed > normalThreats) { // adjust, if there are not enough threats
normalUnconfirmed -= 2;
seriousUnconfirmed++;
}
else if (seriousUnconfirmed > seriousThreats) { // adjust, if there are not enough serious threats
normalUnconfirmed += 2;
seriousUnconfirmed--;
}
logger.fine("Normal unconfirmed Threats: " + normalUnconfirmed + "; Serious unconfirmed Threats: " + seriousUnconfirmed);
// sane threat distribution onto phase 1 and 2
int threatsFirstPhase = threatsSum / 2 + generator.nextInt(3)-1;
int threatsSecondPhase = threatsSum - threatsFirstPhase;
if (threatsSecondPhase > threatsFirstPhase && threatsSecondPhase - threatsFirstPhase > 1) {
threatsSecondPhase--;
threatsFirstPhase++;
} else if (threatsSecondPhase < threatsFirstPhase && threatsFirstPhase - threatsSecondPhase > 1) {
threatsSecondPhase++;
threatsFirstPhase--;
}
logger.fine("Threats 1st phase: " + threatsFirstPhase + "; Threats 2nd phase: " + threatsSecondPhase);
// phases
ArrayList<Integer> phaseOne = new ArrayList<Integer>(4);
for (int i = 1; i <= 4; i++) phaseOne.add(new Integer(i));
ArrayList<Integer> phaseTwo = new ArrayList<Integer>(4);
for (int i = 5; i <= 8; i++) phaseTwo.add(new Integer(i));
// remove random entries from the phases
for (int i = 0; i < 4-threatsFirstPhase; i++) {
phaseOne.remove(generator.nextInt(phaseOne.size()));
}
for (int i = 0; i < 4-threatsSecondPhase; i++) {
phaseTwo.remove(generator.nextInt(phaseTwo.size()));
}
// free memory
ArrayList<Integer> phases = new ArrayList<Integer>(threatsFirstPhase + threatsSecondPhase);
ArrayList<Integer> internalphases = new ArrayList<Integer>(threatsFirstPhase + threatsSecondPhase);
for (int i = 0; i < threatsFirstPhase; i++) {
phases.add(phaseOne.get(i));
internalphases.add(phaseOne.get(i));
}
for (int i = 0; i < threatsSecondPhase; i++) {
phases.add(phaseTwo.get(i));
internalphases.add(phaseTwo.get(i));
}
phaseOne = null; phaseTwo = null;
// create threats by level
threats = new ThreatGroup[8];
for (int i = 0; i < 8; i++) {
threats[i] = new ThreatGroup();
}
// counter for maximum internal threats
int internalThreatsNumber = 0;
//statistics counter to make internal threats likely, too
int externalThreatLevelLeft = externalThreats;
for (int i = 0; i < threatsSum; i++) {
Threat newThreat = new Threat(); // new threat created
if (i < seriousThreats) {
newThreat.setThreatLevel(Threat.THREAT_LEVEL_SERIOUS);
// unconfirmed reports
if (seriousUnconfirmed > 0) {
seriousUnconfirmed--;
newThreat.setConfirmed(false);
} else newThreat.setConfirmed(true);
}
else {
newThreat.setThreatLevel(Threat.THREAT_LEVEL_NORMAL);
// unconfirmed reports
if (normalUnconfirmed > 0) {
normalUnconfirmed--;
newThreat.setConfirmed(false);
} else newThreat.setConfirmed(true);
}
// internal/external?
if (generator.nextInt(threatsSum - i) + 1 <= externalThreatLevelLeft) {
if (newThreat.getThreatLevel() == Threat.THREAT_LEVEL_SERIOUS) {
if (externalThreatLevelLeft == 1) { // not enough external threat level left => make internal
newThreat.setThreatPosition(Threat.THREAT_POSITION_INTERNAL);
internalThreatsNumber++;
} else { // serious threat level deduction
externalThreatLevelLeft -= 2;
newThreat.setThreatPosition(Threat.THREAT_POSITION_EXTERNAL);
}
} else { // normal threat level deduction
externalThreatLevelLeft--;
newThreat.setThreatPosition(Threat.THREAT_POSITION_EXTERNAL);
}
} else {
newThreat.setThreatPosition(Threat.THREAT_POSITION_INTERNAL);
internalThreatsNumber++;
}
if (internalThreatsNumber > maxInternalThreatsNumber) {
logger.info("Too many internal threats. Redoing.");
return false;
}
// define phase
			int maxCounter = 3; // retry budget before giving up
boolean found = false;
do {
int idx = generator.nextInt(phases.size());
int phase = phases.get(idx).intValue();
if (newThreat.getThreatPosition() == Threat.THREAT_POSITION_INTERNAL) {
idx = generator.nextInt(internalphases.size());
phase = internalphases.get(idx).intValue();
}
if (newThreat.getThreatLevel() == Threat.THREAT_LEVEL_SERIOUS) {
if (newThreat.getThreatPosition() == Threat.THREAT_POSITION_EXTERNAL) {
if (phase < minTSeriousExternalThreat || phase > maxTSeriousExternalThreat) continue;
} else {
if (phase < minTSeriousInternalThreat || phase > maxTSeriousInternalThreat) continue;
}
} else {
if (newThreat.getThreatPosition() == Threat.THREAT_POSITION_EXTERNAL) {
						if (phase < minTNormalExternalThreat || phase > maxTNormalExternalThreat) continue;
} else {
if (phase < minTNormalInternalThreat || phase > maxTNormalInternalThreat) continue;
}
}
found = true;
newThreat.setTime(phase);
				if (newThreat.getThreatPosition() == Threat.THREAT_POSITION_INTERNAL) {
					// also consume the following internal slot, presumably to keep internal threats
					// from landing in adjacent phases (see the TODO further down)
					if (idx < internalphases.size() - 1) {
						internalphases.remove(idx+1);
					}
					internalphases.remove(idx);
} else {
phases.remove(idx);
}
} while(!found && maxCounter-- > 0);
if (!found) {
logger.info("Could not create mission due to phase restrictions. Redoing.");
return false;
}
//System.out.println(newThreat);
if (newThreat.getThreatPosition() == Threat.THREAT_POSITION_INTERNAL) {
threats[newThreat.getTime() - 1].addInternal(newThreat);
} else {
threats[newThreat.getTime() - 1].addExternal(newThreat);
}
} // for (int i = 0; i < threatsSum; i++) {
// TODO: check if there are two internal threats in a row - if there are, redo mission
		// now assign attack sectors to the external threats, avoiding the same sector twice in a row
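		// illustrative: if the previous external threat hit BLUE and the die roll is 0,
		// the new threat is steered to WHITE, so no sector is attacked twice in a row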
int lastSector = -1;
for (int i = 0; i < 8; i++) {
Threat x = threats[i].getExternal();
if (x != null) {
switch(generator.nextInt(3)) {
case 0: if (lastSector != Threat.THREAT_SECTOR_BLUE) x.setSector(Threat.THREAT_SECTOR_BLUE);
else x.setSector(Threat.THREAT_SECTOR_WHITE); break;
case 1: if (lastSector != Threat.THREAT_SECTOR_WHITE) x.setSector(Threat.THREAT_SECTOR_WHITE);
else x.setSector(Threat.THREAT_SECTOR_RED); break;
case 2: if (lastSector != Threat.THREAT_SECTOR_RED) x.setSector(Threat.THREAT_SECTOR_RED);
else x.setSector(Threat.THREAT_SECTOR_BLUE); break;
default: System.out.println("No Way!");
}
threats[i].addExternal(x);
lastSector = x.getSector();
}
//if (threats[i] != null) System.out.println(threats[i]);
}
// for (int i = 0; i < 8; i++) {
// System.out.println(i);
// System.out.println(threats[i].getInternal());
// System.out.println(threats[i].getExternal());
// }
return true;
}
/**
* Generate data operations (either data transfer or incoming data)
* @return true, if data creation could be generated
*/
protected boolean generateDataOperations() {
// clear data
incomingData = new int[3];
dataTransfers = new int[3];
int incomingSum = 0;
int transferSum = 0;
// generate stuff by phase
for (int i = 0; i < 3; i++) {
incomingData[i] = generator.nextInt(maxIncomingData[i] - minIncomingData[i] + 1) + minIncomingData[i];
dataTransfers[i] = generator.nextInt(maxDataTransfer[i] - minDataTransfer[i] + 1) + minDataTransfer[i];
// check minimums
if (incomingData[i] + dataTransfers[i] < minDataOperations[i] ||
incomingData[i] + dataTransfers[i] > maxDataOperations[i]) return false;
incomingSum += incomingData[i];
transferSum += dataTransfers[i];
}
// check minimums
if (incomingSum < minIncomingDataTotal || transferSum < minDataTransferTotal) return false;
		// debugging information
		if (logger.isLoggable(Level.FINE)) {
for (int i = 0; i < 3; i++) {
logger.fine("Phase " + (i+1) + ": Incoming Data = " + incomingData[i] + "; Data Transfers = " + dataTransfers[i]);
}
}
return true;
}
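	/*
	 * Note on the draws above (explanatory only, not original code): nextInt(max - min + 1) + min
	 * samples uniformly from the closed range [min, max]; e.g. min = 1, max = 3 gives
	 * nextInt(3) + 1, i.e. one of 1, 2 or 3 with equal probability.
	 */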
/**
* simple generation of times for phases, white noise etc.
*/
protected void generateTimes() {
// generate white noise
int whiteNoiseTime = generator.nextInt(maxWhiteNoise - minWhiteNoise + 1) + minWhiteNoise;
logger.fine("White noise time: " + whiteNoiseTime);
// create chunks
ArrayList<Integer> whiteNoiseChunks = new ArrayList<Integer>();
while (whiteNoiseTime > 0) {
// create random chunk
int chunk = generator.nextInt(maxWhiteNoiseTime - minWhiteNoiseTime + 1) + minWhiteNoiseTime;
// check if there is enough time left
if (chunk > whiteNoiseTime) {
// hard case: smaller than minimum time
if (chunk < minWhiteNoiseTime) {
// add to last chunk that fits
for (int i = whiteNoiseChunks.size()-1; i >= 0; i--) {
int sumChunk = whiteNoiseChunks.get(i) + chunk;
// if smaller than maximum time: add to this chunk
if (sumChunk <= maxWhiteNoiseTime) {
whiteNoiseChunks.set(i, sumChunk);
whiteNoiseTime = 0;
break;
}
}
// still not zeroed
if (whiteNoiseTime > 0) { // add to last element, regardless - quite unlikely though
int lastIdx = whiteNoiseChunks.size()-1;
whiteNoiseChunks.set(lastIdx, whiteNoiseChunks.get(lastIdx) + chunk);
whiteNoiseTime = 0;
}
} else { // easy case: create smaller rest chunk
whiteNoiseChunks.add(whiteNoiseTime);
whiteNoiseTime = 0;
}
} else { // add new chunk
whiteNoiseChunks.add(chunk);
whiteNoiseTime -= chunk;
}
}
// ok, add chunks to mission
whiteNoise = new WhiteNoise[whiteNoiseChunks.size()];
for (int i = 0; i < whiteNoiseChunks.size(); i++) whiteNoise[i] = new WhiteNoise(whiteNoiseChunks.get(i));
// add mission lengths
phaseTimes = new int[3];
for (int i = 0; i < 3; i++) {
phaseTimes[i] = generator.nextInt(maxPhaseTime[i] - minPhaseTime[i] + 1) + minPhaseTime[i];
}
}
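	/*
	 * Worked example for the chunking above (illustrative numbers): with whiteNoiseTime = 50,
	 * minWhiteNoiseTime = 10 and maxWhiteNoiseTime = 30, a first draw of 30 leaves 20;
	 * a second draw of 25 exceeds the remaining 20 but is above the minimum, so the
	 * remaining 20 is added as a smaller rest chunk and the loop ends with chunks [30, 20].
	 */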
/**
* generate phase stuff from data above
* @return true if phase generation succeeded
*/
protected boolean generatePhases() {
logger.info("Data gathered: Generating phases.");
// create events
eventList = new EventList();
// add fixed events: announcements
eventList.addPhaseEvents(phaseTimes[0], phaseTimes[1], phaseTimes[2]);
boolean ambushOccured = false;
// add threats in first phase
// ambush handling - is there a phase 4, and it is a normal external threat? ... and chance is taken?
Threat maybeAmbush = threats[3].getExternal();
if (maybeAmbush != null && maybeAmbush.getThreatLevel() == Threat.THREAT_LEVEL_NORMAL && generator.nextInt(100) + 1 < chanceForAmbush[0]) {
//...then add an "ambush" threat between 1 minute and 20 secs warnings
boolean done = false; // try until it fits
do {
// TODO: remove hardcoded length here:
int ambushTime = generator.nextInt(35) + phaseTimes[0] - 59;
logger.info("Ambush in phase 1 at time: " + ambushTime);
done = eventList.addEvent(ambushTime, maybeAmbush);
} while (!done);
threats[3].removeExternal();
ambushOccured = true; // to disallow two ambushes in one game
}
// to be used further down
int[] lastThreatTime = { 0, 0 };
// add the rest of the threats
int currentTime = generator.nextInt(maxTimeForFirst[0] - minTimeForFirst[0] + 1) + minTimeForFirst[0];
// threats should appear within this time
int lastTime = (int) (phaseTimes[0] * (((float)threatsWithInPercent) / 100));
boolean first = true;
// look for first threat
for (int i = 0; i <= 3; i++) {
ThreatGroup now = threats[i];
Threat activeThreat;
if (now.hasExternal()) {
activeThreat = now.removeExternal();
i--;
} else if (now.hasInternal()) {
activeThreat = now.removeInternal();
i--;
} else {
continue;
}
// first event?
if (first) {
if (!eventList.addEvent(currentTime, activeThreat)) logger.warning("Could not add first event to list (time " + currentTime + ") - arg!");
first = false;
} else {
boolean done = false; // try until it fits
int nextTime = 0;
int tries = 0; // number of tries
do {
					// schedule when the next threat appears
					int divisor = 2;
					if (++tries > 20) divisor = 4; // widen the spacing divisor as retries accumulate
					else if (tries > 10) divisor = 3;
					if (lastTime <= currentTime) return false;
					nextTime = generator.nextInt(Math.max(1, (lastTime - currentTime) / divisor)) + 5;
					if (tries > 30) return false;
done = eventList.addEvent(currentTime + nextTime, activeThreat);
} while (!done);
currentTime += nextTime;
// save lastThreatTime for data transfers further down
if (i < 3) lastThreatTime[0] = currentTime;
}
// add to time
currentTime += activeThreat.getLengthInSeconds();
}
// add threats in second phase
// ambush handling - is there a phase 8, and it is a normal external threat? ... and chance is taken?
maybeAmbush = threats[7].getExternal();
if (!ambushOccured && maybeAmbush != null && maybeAmbush.getThreatLevel() == Threat.THREAT_LEVEL_NORMAL && generator.nextInt(100) + 1 < chanceForAmbush[1]) {
//...then add an "ambush" threat between 1 minute and 20 secs warnings
boolean done = false; // try until it fits
do {
// TODO: remove hardcoded length here:
int ambushTime = generator.nextInt(35) + phaseTimes[0] + phaseTimes[1] - 59;
logger.info("Ambush in phase 2 at time: " + ambushTime);
done = eventList.addEvent(ambushTime, maybeAmbush);
} while (!done);
threats[7].removeExternal();
}
// add the rest of the threats
currentTime = phaseTimes[0] + generator.nextInt(maxTimeForFirst[1] - minTimeForFirst[1] + 1) + minTimeForFirst[1];
// threats should appear within this time
lastTime = phaseTimes[0] + (int) (phaseTimes[1] * (((float)threatsWithInPercent) / 100));
first = true;
// look for first threat
for (int i = 4; i <= 7; i++) {
ThreatGroup now = threats[i];
Threat activeThreat;
if (now.hasExternal()) {
activeThreat = now.removeExternal();
i--;
} else if (now.hasInternal()) {
activeThreat = now.removeInternal();
i--;
} else {
continue;
}
// first event?
if (first) {
if (!eventList.addEvent(currentTime, activeThreat)) logger.warning("Could not add first event to list in second phase (time " + currentTime + ") - arg!");
first = false;
} else {
boolean done = false; // try until it fits
int nextTime = 0;
int tries = 0; // number of tries
do {
					// schedule when the next threat appears
					int divisor = 2;
					if (++tries > 20) divisor = 4; // widen the spacing divisor as retries accumulate
					else if (tries > 10) divisor = 3;
					if (lastTime <= currentTime) return false;
					nextTime = generator.nextInt(Math.max(1, (lastTime - currentTime) / divisor)) + 5;
					if (tries > 30) return false;
done = eventList.addEvent(currentTime + nextTime, activeThreat);
} while (!done);
currentTime += nextTime;
// save lastThreatTime for data transfers further down
if (i < 7) lastThreatTime[1] = currentTime;
}
// add to time
currentTime += activeThreat.getLengthInSeconds();
}
//add data transfers
// get start and end times
int startTime = 0;
int endTime = 0;
// special balance: first data transfers in phase 1 and 2 should occur shortly after first threat wave
for (int i = 0; i < 2; i++) {
startTime = endTime;
endTime += phaseTimes[i];
if (dataTransfers[i] > 0) { // if there is a data transfer
startTime = lastThreatTime[i];
boolean done = false; // try until it fits
				do { // try to add the data transfer within ~30 seconds of the last threat
startTime = generator.nextInt(31) + startTime + 1;
done = eventList.addEvent(startTime, new DataTransfer());
} while (!done && startTime < endTime);
if (done) {
// reduce data transfers below
dataTransfers[i]--;
}
}
}
startTime = 0;
endTime = 0;
// distribute rest of data transfers and incoming data randomly within the phases
for (int i = 0; i < 3; i++) {
// recalculate phase times
startTime = endTime;
endTime += phaseTimes[i];
// data transfer first, since these are fairly long
for (int j = 0; j < dataTransfers[i]; j++) {
boolean done = false; // try until it fits
do {
					// data transfers can occur almost anywhere within the phase
int time = generator.nextInt(endTime - startTime) + startTime - 5; // to fend off events after mission ends
done = eventList.addEvent(time, new DataTransfer());
} while (!done);
}
// incoming data second
			for (int j = 0; j < incomingData[i]; j++) {
boolean done = false; // try until it fits
do {
					// incoming data can occur almost anywhere within the phase
int time = generator.nextInt(endTime - startTime) + startTime - 5; // to fend off events after mission ends
done = eventList.addEvent(time, new IncomingData());
} while (!done);
}
}
//add white noise at random times
for (int i = 0; i < whiteNoise.length; i++) {
boolean done = false; // try until it fits
do {
// white noise can pretty much occur everywhere
int time = generator.nextInt(phaseTimes[0] + phaseTimes[1] + phaseTimes[2] - 30) + 10;
done = eventList.addEvent(time, whiteNoise[i]);
} while (!done);
}
return true;
}
/**
* Prints list of missions
*/
@Override
public String toString() {
return eventList.toString();
}
}
|
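The generate* methods in the row above return false whenever a random draw turns out infeasible, and the "Redoing." log messages imply the caller simply retries the whole generation. A minimal sketch of such a driver loop, with assumed method names (generateThreats is not visible in this excerpt):

    // Hypothetical retry driver -- a sketch, not code from the original row
    public boolean generateMission(int maxAttempts) {
        for (int attempt = 0; attempt < maxAttempts; attempt++) {
            if (!generateThreats()) continue;        // threat layout infeasible: redo
            if (!generateDataOperations()) continue; // data operation bounds violated: redo
            generateTimes();                         // never fails
            if (generatePhases()) return true;       // all events placed
        }
        return false; // give up after too many infeasible draws
    }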
diff --git a/src/main/java/org/jboss/as/jpa/processor/JPAAnnotationParseProcessor.java b/src/main/java/org/jboss/as/jpa/processor/JPAAnnotationParseProcessor.java
index cb84354..96d64f2 100644
--- a/src/main/java/org/jboss/as/jpa/processor/JPAAnnotationParseProcessor.java
+++ b/src/main/java/org/jboss/as/jpa/processor/JPAAnnotationParseProcessor.java
@@ -1,376 +1,376 @@
/*
* JBoss, Home of Professional Open Source.
* Copyright 2011, Red Hat, Inc., and individual contributors
* as indicated by the @author tags. See the copyright.txt file in the
* distribution for a full listing of individual contributors.
*
* This is free software; you can redistribute it and/or modify it
* under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation; either version 2.1 of
* the License, or (at your option) any later version.
*
* This software is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this software; if not, write to the Free
* Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
* 02110-1301 USA, or see the FSF site: http://www.fsf.org.
*/
package org.jboss.as.jpa.processor;
import org.jboss.as.ee.component.Attachments;
import org.jboss.as.ee.component.BindingConfiguration;
import org.jboss.as.ee.component.ClassConfigurator;
import org.jboss.as.ee.component.ComponentConfiguration;
import org.jboss.as.ee.component.ComponentConfigurator;
import org.jboss.as.ee.component.ComponentDescription;
import org.jboss.as.ee.component.EEModuleClassConfiguration;
import org.jboss.as.ee.component.EEModuleClassDescription;
import org.jboss.as.ee.component.EEModuleDescription;
import org.jboss.as.ee.component.FieldInjectionTarget;
import org.jboss.as.ee.component.InjectionSource;
import org.jboss.as.ee.component.InjectionTarget;
import org.jboss.as.ee.component.LookupInjectionSource;
import org.jboss.as.ee.component.MethodInjectionTarget;
import org.jboss.as.ee.component.ResourceInjectionConfiguration;
import org.jboss.as.ee.component.ViewConfiguration;
import org.jboss.as.ee.component.ViewConfigurator;
import org.jboss.as.ee.component.ViewDescription;
import org.jboss.as.ejb3.component.stateful.StatefulComponentDescription;
import org.jboss.as.ejb3.component.stateless.StatelessComponentDescription;
import org.jboss.as.jpa.container.PersistenceUnitSearch;
import org.jboss.as.jpa.injectors.PersistenceContextInjectionSource;
import org.jboss.as.jpa.injectors.PersistenceUnitInjectionSource;
import org.jboss.as.jpa.interceptor.SBInvocationInterceptor;
import org.jboss.as.jpa.interceptor.SFSBCreateInterceptorFactory;
import org.jboss.as.jpa.interceptor.SFSBDestroyInterceptorFactory;
import org.jboss.as.jpa.interceptor.SFSBInvocationInterceptor;
import org.jboss.as.jpa.service.PersistenceUnitService;
import org.jboss.as.server.deployment.DeploymentPhaseContext;
import org.jboss.as.server.deployment.DeploymentUnit;
import org.jboss.as.server.deployment.DeploymentUnitProcessingException;
import org.jboss.as.server.deployment.DeploymentUnitProcessor;
import org.jboss.as.server.deployment.annotation.CompositeIndex;
import org.jboss.invocation.ImmediateInterceptorFactory;
import org.jboss.jandex.AnnotationInstance;
import org.jboss.jandex.AnnotationTarget;
import org.jboss.jandex.AnnotationValue;
import org.jboss.jandex.ClassInfo;
import org.jboss.jandex.DotName;
import org.jboss.jandex.FieldInfo;
import org.jboss.jandex.MethodInfo;
import org.jboss.msc.service.ServiceName;
import javax.persistence.PersistenceContext;
import javax.persistence.PersistenceContextType;
import javax.persistence.PersistenceUnit;
import java.lang.reflect.Method;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
* Handle PersistenceContext and PersistenceUnit annotations.
*
* @author Scott Marlow (based on ResourceInjectionAnnotationParsingProcessor)
*/
public class JPAAnnotationParseProcessor implements DeploymentUnitProcessor {
private static final DotName PERSISTENCE_CONTEXT_ANNOTATION_NAME = DotName.createSimple(PersistenceContext.class.getName());
private static final DotName PERSISTENCE_UNIT_ANNOTATION_NAME = DotName.createSimple(PersistenceUnit.class.getName());
private static final String ENTITY_MANAGER_CLASS = "javax.persistence.EntityManager";
private static final String ENTITY_MANAGERFACTORY_CLASS = "javax.persistence.EntityManagerFactory";
@Override
public void deploy(DeploymentPhaseContext phaseContext) throws DeploymentUnitProcessingException {
final DeploymentUnit deploymentUnit = phaseContext.getDeploymentUnit();
final EEModuleDescription eeModuleDescription = deploymentUnit.getAttachment(Attachments.EE_MODULE_DESCRIPTION);
final CompositeIndex index = deploymentUnit.getAttachment(org.jboss.as.server.deployment.Attachments.COMPOSITE_ANNOTATION_INDEX);
// @PersistenceContext
List<AnnotationInstance> persistenceContexts = index.getAnnotations(PERSISTENCE_CONTEXT_ANNOTATION_NAME);
// create binding and injection configurations out of the @PersistenceContext annotations
this.processPersistenceAnnotations(deploymentUnit, eeModuleDescription, persistenceContexts);
// @PersistenceUnit
List<AnnotationInstance> persistenceUnits = index.getAnnotations(PERSISTENCE_UNIT_ANNOTATION_NAME);
        // create binding and injection configurations out of the @PersistenceUnit annotations
this.processPersistenceAnnotations(deploymentUnit, eeModuleDescription, persistenceUnits);
// if we found any @PersistenceContext or @PersistenceUnit annotations then mark this as a JPA deployment
if (!persistenceContexts.isEmpty() || !persistenceUnits.isEmpty()) {
JPADeploymentMarker.mark(deploymentUnit);
}
}
@Override
public void undeploy(DeploymentUnit context) {
}
private void processPersistenceAnnotations(final DeploymentUnit deploymentUnit, final EEModuleDescription eeModuleDescription, List<AnnotationInstance> persistenceContexts) throws DeploymentUnitProcessingException {
for (AnnotationInstance annotation : persistenceContexts) {
ClassInfo declaringClass = null;
final AnnotationTarget annotationTarget = annotation.target();
if (annotationTarget instanceof FieldInfo) {
FieldInfo fieldInfo = (FieldInfo) annotationTarget;
declaringClass = fieldInfo.declaringClass();
EEModuleClassDescription eeModuleClassDescription = eeModuleDescription.getOrAddClassByName(declaringClass.name().toString());
this.processField(deploymentUnit, annotation, fieldInfo, eeModuleClassDescription);
} else if (annotationTarget instanceof MethodInfo) {
MethodInfo methodInfo = (MethodInfo) annotationTarget;
declaringClass = methodInfo.declaringClass();
EEModuleClassDescription eeModuleClassDescription = eeModuleDescription.getOrAddClassByName(declaringClass.name().toString());
this.processMethod(deploymentUnit, annotation, methodInfo, eeModuleClassDescription);
} else if (annotationTarget instanceof ClassInfo) {
declaringClass = (ClassInfo) annotationTarget;
EEModuleClassDescription eeModuleClassDescription = eeModuleDescription.getOrAddClassByName(declaringClass.name().toString());
this.processClass(deploymentUnit, annotation, declaringClass, eeModuleClassDescription);
}
// setup interceptors if the annotation is on a component
if (declaringClass != null) {
// TODO: This may not always work? For example : What if this deployer runs before the components have been
// added to the EEModuleDescription?
ComponentDescription componentDescription = eeModuleDescription.getComponentByClassName(declaringClass.name().toString());
// if it's a component then setup the interceptors
if (componentDescription != null) {
this.registerInterceptorsForExtendedPersistenceContext(componentDescription, annotation);
}
}
}
}
private void processField(final DeploymentUnit deploymentUnit, final AnnotationInstance annotation, final FieldInfo fieldInfo,
final EEModuleClassDescription eeModuleClassDescription) throws DeploymentUnitProcessingException {
final String fieldName = fieldInfo.name();
final AnnotationValue declaredNameValue = annotation.value("name");
final String declaredName = declaredNameValue != null ? declaredNameValue.asString() : null;
final String localContextName;
if (declaredName == null || declaredName.isEmpty()) {
localContextName = fieldName;
} else {
localContextName = declaredName;
}
//final AnnotationValue declaredTypeValue = annotation.value("type");
final DotName declaredTypeDotName = fieldInfo.type().name();
final DotName injectionTypeDotName = declaredTypeDotName == null || declaredTypeDotName.toString().equals(Object.class.getName()) ? fieldInfo.type().name() : declaredTypeDotName;
final String injectionType = injectionTypeDotName.toString();
final InjectionSource bindingSource = this.getBindingSource(deploymentUnit, annotation, injectionType);
final BindingConfiguration bindingConfiguration = new BindingConfiguration(localContextName, bindingSource);
// add the binding configuration to the class description
eeModuleClassDescription.getConfigurators().add(new ClassConfigurator() {
@Override
public void configure(DeploymentPhaseContext context, EEModuleClassDescription description, EEModuleClassConfiguration configuration) throws DeploymentUnitProcessingException {
configuration.getBindingConfigurations().add(bindingConfiguration);
}
});
// setup the injection target
final InjectionTarget injectionTarget = new FieldInjectionTarget(fieldInfo.declaringClass().name().toString(), fieldName, fieldInfo.type().name().toString());
// source is always local ENC jndi
final InjectionSource injectionSource = new LookupInjectionSource(localContextName);
final ResourceInjectionConfiguration injectionConfiguration = new ResourceInjectionConfiguration(injectionTarget, injectionSource);
eeModuleClassDescription.getConfigurators().add(new ClassConfigurator() {
@Override
public void configure(DeploymentPhaseContext context, EEModuleClassDescription description, EEModuleClassConfiguration configuration) throws DeploymentUnitProcessingException {
configuration.getInjectionConfigurations().add(injectionConfiguration);
}
});
}
private void processMethod(final DeploymentUnit deploymentUnit, final AnnotationInstance annotation, final MethodInfo methodInfo,
final EEModuleClassDescription eeModuleClassDescription) throws DeploymentUnitProcessingException {
final String methodName = methodInfo.name();
if (!methodName.startsWith("set") || methodInfo.args().length != 1) {
throw new IllegalArgumentException("injection target is invalid. Only setter methods are allowed: " + methodInfo);
}
final String contextNameSuffix = methodName.substring(3, 4).toLowerCase() + methodName.substring(4);
final AnnotationValue declaredNameValue = annotation.value("name");
final String declaredName = declaredNameValue != null ? declaredNameValue.asString() : null;
final String localContextName;
if (declaredName == null || declaredName.isEmpty()) {
localContextName = methodInfo.declaringClass().name().toString() + "/" + contextNameSuffix;
} else {
localContextName = declaredName;
}
final DotName declaredTypeDotName = methodInfo.returnType().name();
final DotName injectionTypeDotName = declaredTypeDotName == null || declaredTypeDotName.toString().equals(Object.class.getName()) ? methodInfo.returnType().name() : declaredTypeDotName;
final String injectionType = injectionTypeDotName.toString();
final InjectionSource bindingSource = this.getBindingSource(deploymentUnit, annotation, injectionType);
final BindingConfiguration bindingConfiguration = new BindingConfiguration(localContextName, bindingSource);
// setup the binding configuration in the class description
eeModuleClassDescription.getConfigurators().add(new ClassConfigurator() {
@Override
public void configure(DeploymentPhaseContext context, EEModuleClassDescription description, EEModuleClassConfiguration configuration) throws DeploymentUnitProcessingException {
configuration.getBindingConfigurations().add(bindingConfiguration);
}
});
// setup the injection configuration
- final InjectionTarget injectionTarget = new MethodInjectionTarget(methodName, methodInfo.declaringClass().name().toString(), methodInfo.returnType().name().toString());
+ final InjectionTarget injectionTarget = new MethodInjectionTarget(methodInfo.declaringClass().name().toString(), methodName, methodInfo.returnType().name().toString());
// source is always local ENC jndi name
final InjectionSource injectionSource = new LookupInjectionSource(localContextName);
final ResourceInjectionConfiguration injectionConfiguration = new ResourceInjectionConfiguration(injectionTarget, injectionSource);
eeModuleClassDescription.getConfigurators().add(new ClassConfigurator() {
@Override
public void configure(DeploymentPhaseContext context, EEModuleClassDescription description, EEModuleClassConfiguration configuration) throws DeploymentUnitProcessingException {
configuration.getInjectionConfigurations().add(injectionConfiguration);
}
});
}
private void processClass(final DeploymentUnit deploymentUnit, final AnnotationInstance annotation, final ClassInfo classInfo,
final EEModuleClassDescription eeModuleClassDescription) throws DeploymentUnitProcessingException {
final AnnotationValue nameValue = annotation.value("name");
if (nameValue == null || nameValue.asString().isEmpty()) {
throw new IllegalArgumentException("Class level annotations must provide a name.");
}
final String name = nameValue.asString();
String type = getClassLevelInjectionType(annotation);
InjectionSource bindingSource = this.getBindingSource(deploymentUnit, annotation, type);
final BindingConfiguration bindingConfiguration = new BindingConfiguration(name, bindingSource);
// setup the binding configuration in the class description
eeModuleClassDescription.getConfigurators().add(new ClassConfigurator() {
@Override
public void configure(DeploymentPhaseContext context, EEModuleClassDescription description, EEModuleClassConfiguration configuration) throws DeploymentUnitProcessingException {
configuration.getBindingConfigurations().add(bindingConfiguration);
}
});
}
private InjectionSource getBindingSource(final DeploymentUnit deploymentUnit, final AnnotationInstance annotation, String injectionTypeName)
throws DeploymentUnitProcessingException {
String scopedPuName = getScopedPuName(deploymentUnit, annotation);
ServiceName puServiceName = getPuServiceName(scopedPuName);
if (isPersistenceContext(annotation)) {
AnnotationValue pcType = annotation.value("type");
PersistenceContextType type = (pcType == null || PersistenceContextType.TRANSACTION.name().equals(pcType.asString()))
? PersistenceContextType.TRANSACTION : PersistenceContextType.EXTENDED;
Map properties;
AnnotationValue value = annotation.value("properties");
AnnotationInstance[] props = value != null ? value.asNestedArray() : null;
if (props != null) {
properties = new HashMap();
for (int source = 0; source < props.length; source++) {
                    properties.put(props[source].value("name").asString(), props[source].value("value").asString());
}
} else {
properties = null;
}
return new PersistenceContextInjectionSource(type, properties, puServiceName, deploymentUnit, scopedPuName, injectionTypeName);
} else {
return new PersistenceUnitInjectionSource(puServiceName, deploymentUnit, scopedPuName, injectionTypeName);
}
}
private boolean isExtendedPersistenceContext(final AnnotationInstance annotation) {
AnnotationValue value = annotation.value("type");
return annotation.name().local().equals("PersistenceContext") &&
(value != null && PersistenceContextType.EXTENDED.name().equals(value.asString()));
}
private boolean isPersistenceContext(final AnnotationInstance annotation) {
return annotation.name().local().equals("PersistenceContext");
}
/**
     * Based on the annotation type, resolves either the EntityManager or the EntityManagerFactory class name.
     *
     * @param annotation the @PersistenceContext or @PersistenceUnit annotation instance
     * @return the fully qualified name of the injection type
*/
private String getClassLevelInjectionType(final AnnotationInstance annotation) {
boolean isPC = annotation.name().local().equals("PersistenceContext");
return isPC ? ENTITY_MANAGER_CLASS : ENTITY_MANAGERFACTORY_CLASS;
}
private String getScopedPuName(final DeploymentUnit deploymentUnit, final AnnotationInstance annotation)
throws DeploymentUnitProcessingException {
final AnnotationValue puName = annotation.value("unitName");
String scopedPuName;
String searchName = null; // note: a null searchName will match the first PU definition found
if (puName != null) {
searchName = puName.asString();
}
scopedPuName = PersistenceUnitSearch.resolvePersistenceUnitSupplier(deploymentUnit, searchName);
if (null == scopedPuName) {
throw new DeploymentUnitProcessingException("Can't find a deployment unit named " + puName.asString() + " at " + deploymentUnit);
}
return scopedPuName;
}
private ServiceName getPuServiceName(String scopedPuName)
throws DeploymentUnitProcessingException {
return PersistenceUnitService.getPUServiceName(scopedPuName);
}
// Register our listeners on SFSB that will be created
private void registerInterceptorsForExtendedPersistenceContext(ComponentDescription componentDescription, AnnotationInstance annotation) {
// if it's a SFSB and extended persistence context then setup appropriate interceptors
if (componentDescription instanceof StatefulComponentDescription && isExtendedPersistenceContext(annotation)) {
// first setup the post construct and pre destroy component interceptors
componentDescription.getConfigurators().add(new ComponentConfigurator() {
@Override
public void configure(DeploymentPhaseContext context, ComponentDescription description, ComponentConfiguration configuration) throws DeploymentUnitProcessingException {
configuration.getPostConstructInterceptors().add(new SFSBCreateInterceptorFactory());
configuration.getPreDestroyInterceptors().add(new SFSBDestroyInterceptorFactory());
}
});
// now for each view and each method on that view, setup the SFSB invocation interceptor factory
// TODO: Is there a better/efficient way of doing this? Why do we need to fetch all methods of the view
// and then setup the interceptors on each of those methods? Why not just have a construct "applies to all
// invocations on the view"?
List<ViewDescription> views = componentDescription.getViews();
for (ViewDescription view : views) {
view.getConfigurators().addFirst(new ViewConfigurator() {
@Override
public void configure(DeploymentPhaseContext context, ComponentConfiguration componentConfiguration, ViewDescription description, ViewConfiguration configuration) throws DeploymentUnitProcessingException {
Method[] viewMethods = configuration.getProxyFactory().getCachedMethods();
for (Method viewMethod : viewMethods) {
configuration.getViewInterceptorDeque(viewMethod).addFirst(new ImmediateInterceptorFactory(SFSBInvocationInterceptor.INSTANCE));
}
}
});
}
}
// register interceptor on stateful/stateless SB with transactional entity manager.
if (!isExtendedPersistenceContext(annotation) &&
(componentDescription instanceof StatefulComponentDescription ||
componentDescription instanceof StatelessComponentDescription)) {
//TODO: this probably adds the interceptor in the wrong order
componentDescription.getConfigurators().add(new ComponentConfigurator() {
@Override
public void configure(DeploymentPhaseContext context, ComponentDescription description, ComponentConfiguration configuration) throws DeploymentUnitProcessingException {
for (Method method : configuration.getDefinedComponentMethods()) {
configuration.getComponentInterceptorDeque(method).addFirst(new ImmediateInterceptorFactory(SBInvocationInterceptor.INSTANCE));
}
}
});
}
}
}
| true | true | private void processMethod(final DeploymentUnit deploymentUnit, final AnnotationInstance annotation, final MethodInfo methodInfo,
final EEModuleClassDescription eeModuleClassDescription) throws DeploymentUnitProcessingException {
final String methodName = methodInfo.name();
if (!methodName.startsWith("set") || methodInfo.args().length != 1) {
throw new IllegalArgumentException("injection target is invalid. Only setter methods are allowed: " + methodInfo);
}
final String contextNameSuffix = methodName.substring(3, 4).toLowerCase() + methodName.substring(4);
final AnnotationValue declaredNameValue = annotation.value("name");
final String declaredName = declaredNameValue != null ? declaredNameValue.asString() : null;
final String localContextName;
if (declaredName == null || declaredName.isEmpty()) {
localContextName = methodInfo.declaringClass().name().toString() + "/" + contextNameSuffix;
} else {
localContextName = declaredName;
}
final DotName declaredTypeDotName = methodInfo.returnType().name();
final DotName injectionTypeDotName = declaredTypeDotName == null || declaredTypeDotName.toString().equals(Object.class.getName()) ? methodInfo.returnType().name() : declaredTypeDotName;
final String injectionType = injectionTypeDotName.toString();
final InjectionSource bindingSource = this.getBindingSource(deploymentUnit, annotation, injectionType);
final BindingConfiguration bindingConfiguration = new BindingConfiguration(localContextName, bindingSource);
// setup the binding configuration in the class description
eeModuleClassDescription.getConfigurators().add(new ClassConfigurator() {
@Override
public void configure(DeploymentPhaseContext context, EEModuleClassDescription description, EEModuleClassConfiguration configuration) throws DeploymentUnitProcessingException {
configuration.getBindingConfigurations().add(bindingConfiguration);
}
});
// setup the injection configuration
final InjectionTarget injectionTarget = new MethodInjectionTarget(methodName, methodInfo.declaringClass().name().toString(), methodInfo.returnType().name().toString());
// source is always local ENC jndi name
final InjectionSource injectionSource = new LookupInjectionSource(localContextName);
final ResourceInjectionConfiguration injectionConfiguration = new ResourceInjectionConfiguration(injectionTarget, injectionSource);
eeModuleClassDescription.getConfigurators().add(new ClassConfigurator() {
@Override
public void configure(DeploymentPhaseContext context, EEModuleClassDescription description, EEModuleClassConfiguration configuration) throws DeploymentUnitProcessingException {
configuration.getInjectionConfigurations().add(injectionConfiguration);
}
});
}
| private void processMethod(final DeploymentUnit deploymentUnit, final AnnotationInstance annotation, final MethodInfo methodInfo,
final EEModuleClassDescription eeModuleClassDescription) throws DeploymentUnitProcessingException {
final String methodName = methodInfo.name();
if (!methodName.startsWith("set") || methodInfo.args().length != 1) {
throw new IllegalArgumentException("injection target is invalid. Only setter methods are allowed: " + methodInfo);
}
final String contextNameSuffix = methodName.substring(3, 4).toLowerCase() + methodName.substring(4);
final AnnotationValue declaredNameValue = annotation.value("name");
final String declaredName = declaredNameValue != null ? declaredNameValue.asString() : null;
final String localContextName;
if (declaredName == null || declaredName.isEmpty()) {
localContextName = methodInfo.declaringClass().name().toString() + "/" + contextNameSuffix;
} else {
localContextName = declaredName;
}
final DotName declaredTypeDotName = methodInfo.returnType().name();
final DotName injectionTypeDotName = declaredTypeDotName == null || declaredTypeDotName.toString().equals(Object.class.getName()) ? methodInfo.returnType().name() : declaredTypeDotName;
final String injectionType = injectionTypeDotName.toString();
final InjectionSource bindingSource = this.getBindingSource(deploymentUnit, annotation, injectionType);
final BindingConfiguration bindingConfiguration = new BindingConfiguration(localContextName, bindingSource);
// setup the binding configuration in the class description
eeModuleClassDescription.getConfigurators().add(new ClassConfigurator() {
@Override
public void configure(DeploymentPhaseContext context, EEModuleClassDescription description, EEModuleClassConfiguration configuration) throws DeploymentUnitProcessingException {
configuration.getBindingConfigurations().add(bindingConfiguration);
}
});
// setup the injection configuration
final InjectionTarget injectionTarget = new MethodInjectionTarget(methodInfo.declaringClass().name().toString(), methodName, methodInfo.returnType().name().toString());
// source is always local ENC jndi name
final InjectionSource injectionSource = new LookupInjectionSource(localContextName);
final ResourceInjectionConfiguration injectionConfiguration = new ResourceInjectionConfiguration(injectionTarget, injectionSource);
eeModuleClassDescription.getConfigurators().add(new ClassConfigurator() {
@Override
public void configure(DeploymentPhaseContext context, EEModuleClassDescription description, EEModuleClassConfiguration configuration) throws DeploymentUnitProcessingException {
configuration.getInjectionConfigurations().add(injectionConfiguration);
}
});
}
|
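The fix in the row above is a pure argument swap: MethodInjectionTarget evidently expects the declaring class name before the method name, and since both parameters are plain Strings the compiler cannot flag the transposition. A self-contained illustration of the failure mode, using a hypothetical type of the same shape rather than the JBoss class:

    // Hypothetical (className, methodName, typeName) holder -- illustration only, not the JBoss class
    final class Target {
        final String className, methodName, typeName;
        Target(String className, String methodName, String typeName) {
            this.className = className; this.methodName = methodName; this.typeName = typeName;
        }
    }
    // Both calls compile, but only the second binds the fields as intended:
    Target wrong = new Target("setEm", "com.acme.Bean", "javax.persistence.EntityManager");
    Target right = new Target("com.acme.Bean", "setEm", "javax.persistence.EntityManager");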
diff --git a/bundles/org.eclipse.team.ui/src/org/eclipse/team/internal/ui/synchronize/actions/SubscriberActionContribution.java b/bundles/org.eclipse.team.ui/src/org/eclipse/team/internal/ui/synchronize/actions/SubscriberActionContribution.java
index a2effa3d5..8a99c0ee7 100644
--- a/bundles/org.eclipse.team.ui/src/org/eclipse/team/internal/ui/synchronize/actions/SubscriberActionContribution.java
+++ b/bundles/org.eclipse.team.ui/src/org/eclipse/team/internal/ui/synchronize/actions/SubscriberActionContribution.java
@@ -1,103 +1,107 @@
/*******************************************************************************
* Copyright (c) 2000, 2004 IBM Corporation and others.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Common Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/cpl-v10.html
*
* Contributors:
* IBM Corporation - initial API and implementation
*******************************************************************************/
package org.eclipse.team.internal.ui.synchronize.actions;
import org.eclipse.core.resources.IResource;
import org.eclipse.jface.action.Action;
import org.eclipse.jface.action.IMenuManager;
import org.eclipse.jface.viewers.IStructuredSelection;
import org.eclipse.team.internal.ui.Policy;
import org.eclipse.team.internal.ui.Utils;
import org.eclipse.team.internal.ui.synchronize.ConfigureRefreshScheduleDialog;
import org.eclipse.team.ui.synchronize.ISynchronizePageConfiguration;
import org.eclipse.team.ui.synchronize.ISynchronizePageSite;
import org.eclipse.team.ui.synchronize.SubscriberParticipant;
import org.eclipse.team.ui.synchronize.SynchronizePageActionGroup;
import org.eclipse.ui.IActionBars;
/**
* Provides the actions to be associated with a synchronize page
*/
public final class SubscriberActionContribution extends SynchronizePageActionGroup {
	// the changes-viewer actions are contributed via the viewer, not via the page.
private Action configureSchedule;
private SyncViewerShowPreferencesAction showPreferences;
private Action refreshSelectionAction;
private RemoveFromViewAction removeFromViewAction;
/* (non-Javadoc)
* @see org.eclipse.team.ui.synchronize.IActionContribution#initialize(org.eclipse.team.ui.synchronize.ISynchronizePageConfiguration)
*/
public void initialize(final ISynchronizePageConfiguration configuration) {
super.initialize(configuration);
final SubscriberParticipant participant = (SubscriberParticipant)configuration.getParticipant();
final ISynchronizePageSite site = configuration.getSite();
// toolbar
if(participant.doesSupportSynchronize()) {
refreshSelectionAction = new Action() {
public void run() {
IStructuredSelection selection = (IStructuredSelection)site.getSelectionProvider().getSelection();
IResource[] resources = Utils.getResources(selection.toArray());
+ if (resources.length == 0) {
+ // Refresh all participant resources
+ resources = participant.getResources();
+ }
participant.refresh(resources, Policy.bind("Participant.synchronizing"), Policy.bind("Participant.synchronizingDetails", participant.getName()), site.getWorkbenchSite()); //$NON-NLS-1$ //$NON-NLS-2$
}
};
Utils.initAction(refreshSelectionAction, "action.refreshWithRemote."); //$NON-NLS-1$
configureSchedule = new Action() {
public void run() {
ConfigureRefreshScheduleDialog d = new ConfigureRefreshScheduleDialog(
site.getShell(), participant.getRefreshSchedule());
d.setBlockOnOpen(false);
d.open();
}
};
Utils.initAction(configureSchedule, "action.configureSchedulel."); //$NON-NLS-1$
}
showPreferences = new SyncViewerShowPreferencesAction(site.getShell());
removeFromViewAction = new RemoveFromViewAction(configuration);
}
/* (non-Javadoc)
* @see org.eclipse.team.ui.synchronize.IActionContribution#fillContextMenu(org.eclipse.jface.action.IMenuManager)
*/
public void fillContextMenu(IMenuManager manager) {
if (findGroup(manager, ISynchronizePageConfiguration.SYNCHRONIZE_GROUP) != null
&& findGroup(manager, ISynchronizePageConfiguration.NAVIGATE_GROUP) != null) {
			// Place synchronize with the navigate group to save space
appendToGroup(manager, ISynchronizePageConfiguration.NAVIGATE_GROUP, refreshSelectionAction);
appendToGroup(manager, ISynchronizePageConfiguration.NAVIGATE_GROUP, removeFromViewAction);
} else {
appendToGroup(manager, ISynchronizePageConfiguration.SYNCHRONIZE_GROUP, refreshSelectionAction);
appendToGroup(manager, ISynchronizePageConfiguration.SYNCHRONIZE_GROUP, removeFromViewAction);
}
}
/* (non-Javadoc)
* @see org.eclipse.team.ui.synchronize.IActionContribution#setActionBars(org.eclipse.ui.IActionBars)
*/
public void fillActionBars(IActionBars actionBars) {
if(actionBars != null) {
// view menu
IMenuManager menu = actionBars.getMenuManager();
if (findGroup(menu, ISynchronizePageConfiguration.SYNCHRONIZE_GROUP) != null
&& findGroup(menu, ISynchronizePageConfiguration.PREFERENCES_GROUP) != null) {
appendToGroup(menu, ISynchronizePageConfiguration.PREFERENCES_GROUP, configureSchedule);
} else {
appendToGroup(menu, ISynchronizePageConfiguration.SYNCHRONIZE_GROUP, configureSchedule);
}
appendToGroup(menu, ISynchronizePageConfiguration.PREFERENCES_GROUP, showPreferences);
}
}
}
| true | true | public void initialize(final ISynchronizePageConfiguration configuration) {
super.initialize(configuration);
final SubscriberParticipant participant = (SubscriberParticipant)configuration.getParticipant();
final ISynchronizePageSite site = configuration.getSite();
// toolbar
if(participant.doesSupportSynchronize()) {
refreshSelectionAction = new Action() {
public void run() {
IStructuredSelection selection = (IStructuredSelection)site.getSelectionProvider().getSelection();
IResource[] resources = Utils.getResources(selection.toArray());
participant.refresh(resources, Policy.bind("Participant.synchronizing"), Policy.bind("Participant.synchronizingDetails", participant.getName()), site.getWorkbenchSite()); //$NON-NLS-1$ //$NON-NLS-2$
}
};
Utils.initAction(refreshSelectionAction, "action.refreshWithRemote."); //$NON-NLS-1$
configureSchedule = new Action() {
public void run() {
ConfigureRefreshScheduleDialog d = new ConfigureRefreshScheduleDialog(
site.getShell(), participant.getRefreshSchedule());
d.setBlockOnOpen(false);
d.open();
}
};
Utils.initAction(configureSchedule, "action.configureSchedulel."); //$NON-NLS-1$
}
showPreferences = new SyncViewerShowPreferencesAction(site.getShell());
removeFromViewAction = new RemoveFromViewAction(configuration);
}
| public void initialize(final ISynchronizePageConfiguration configuration) {
super.initialize(configuration);
final SubscriberParticipant participant = (SubscriberParticipant)configuration.getParticipant();
final ISynchronizePageSite site = configuration.getSite();
// toolbar
if(participant.doesSupportSynchronize()) {
refreshSelectionAction = new Action() {
public void run() {
IStructuredSelection selection = (IStructuredSelection)site.getSelectionProvider().getSelection();
IResource[] resources = Utils.getResources(selection.toArray());
if (resources.length == 0) {
// Refresh all participant resources
resources = participant.getResources();
}
participant.refresh(resources, Policy.bind("Participant.synchronizing"), Policy.bind("Participant.synchronizingDetails", participant.getName()), site.getWorkbenchSite()); //$NON-NLS-1$ //$NON-NLS-2$
}
};
Utils.initAction(refreshSelectionAction, "action.refreshWithRemote."); //$NON-NLS-1$
configureSchedule = new Action() {
public void run() {
ConfigureRefreshScheduleDialog d = new ConfigureRefreshScheduleDialog(
site.getShell(), participant.getRefreshSchedule());
d.setBlockOnOpen(false);
d.open();
}
};
Utils.initAction(configureSchedule, "action.configureSchedulel."); //$NON-NLS-1$
}
showPreferences = new SyncViewerShowPreferencesAction(site.getShell());
removeFromViewAction = new RemoveFromViewAction(configuration);
}
|
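The Eclipse fix above turns an empty selection into "refresh all of the participant's resources" rather than a refresh of nothing. The guard reduces to a small reusable pattern (sketch with illustrative names, not the Eclipse API):

    // Empty selection falls back to the full set -- illustrative sketch
    static String[] resolveRefreshTargets(String[] selected, String[] all) {
        return selected.length == 0 ? all : selected;
    }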
diff --git a/backend/src/main/java/com/gooddata/connector/backend/AbstractSqlConnectorBackend.java b/backend/src/main/java/com/gooddata/connector/backend/AbstractSqlConnectorBackend.java
index 0230dd80..e732eea4 100644
--- a/backend/src/main/java/com/gooddata/connector/backend/AbstractSqlConnectorBackend.java
+++ b/backend/src/main/java/com/gooddata/connector/backend/AbstractSqlConnectorBackend.java
@@ -1,1158 +1,1158 @@
/*
* Copyright (c) 2009, GoodData Corporation. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification, are permitted provided
* that the following conditions are met:
*
* * Redistributions of source code must retain the above copyright notice, this list of conditions and
* the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright notice, this list of conditions
* and the following disclaimer in the documentation and/or other materials provided with the distribution.
* * Neither the name of the GoodData Corporation nor the names of its contributors may be used to endorse
* or promote products derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS
* OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY
* AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package com.gooddata.connector.backend;
import java.io.File;
import java.io.IOException;
import java.sql.Connection;
import java.sql.DatabaseMetaData;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.Map;
import au.com.bytecode.opencsv.CSVReader;
import com.gooddata.util.CSVWriter;
import org.apache.log4j.Logger;
import com.gooddata.connector.model.PdmColumn;
import com.gooddata.connector.model.PdmLookupReplication;
import com.gooddata.connector.model.PdmSchema;
import com.gooddata.connector.model.PdmTable;
import com.gooddata.exception.ConnectorBackendException;
import com.gooddata.exception.InternalErrorException;
import com.gooddata.integration.model.Column;
import com.gooddata.integration.model.DLI;
import com.gooddata.integration.model.DLIPart;
import com.gooddata.modeling.model.SourceColumn;
import com.gooddata.naming.N;
import com.gooddata.util.FileUtil;
import com.gooddata.util.JdbcUtil;
import com.gooddata.util.StringUtil;
import com.gooddata.util.JdbcUtil.StatementHandler;
/**
* GoodData abstract connector backend. This connector backend provides the base implementation that the specific
* connector backends reuse.
* Connector backend handles communication with the specific SQL database. Specifically it handles the DB connection
* and other communication specifics of the DBMS. It uses the SQL driver that generates appropriate SQL dialect.
*
* @author zd <[email protected]>
* @version 1.0
 */
public abstract class AbstractSqlConnectorBackend extends AbstractConnectorBackend implements ConnectorBackend {
private static final int BATCH_SIZE = 1000;
private static Logger l = Logger.getLogger(AbstractSqlConnectorBackend.class);
// Project id
private String projectId;
// database username
private String username;
// database password
private String password;
// database connection
protected Connection connection = null;
// autoincrement syntax
protected String SYNTAX_AUTOINCREMENT = "";
// SQL concat function prefix and suffix
protected String SYNTAX_CONCAT_FUNCTION_PREFIX = "";
protected String SYNTAX_CONCAT_FUNCTION_SUFFIX = "";
protected String SYNTAX_CONCAT_OPERATOR = "";
    // separates the different LABELs when we concatenate them to create a unique identifier out of them
protected String HASH_SEPARATOR = "%";
/**
* Constructor
* @param username database backend username
* @param password database backend password
* @throws IOException in case of an IO issue
*/
protected AbstractSqlConnectorBackend(String username, String password) throws IOException {
setUsername(username);
setPassword(password);
}
/**
* {@inheritDoc}
*/
public abstract void dropSnapshots();
/**
* {@inheritDoc}
*/
public void deploy(DLI dli, List<DLIPart> parts, String dir, String archiveName)
throws IOException {
deploySnapshot(dli, parts, dir, archiveName, null);
}
/**
* {@inheritDoc}
*/
public void initialize() {
Connection con;
try {
l.debug("Initializing schema.");
con = getConnection();
if(!isInitialized()) {
l.debug("Initializing system schema.");
initializeLocalProject();
l.debug("System schema initialized.");
}
initializeLocalDataSet(getPdm());
l.debug("Schema initialized.");
}
catch (SQLException e) {
throw new ConnectorBackendException("Error initializing pdm schema '" + getPdm().getName() + "'", e);
}
}
/**
* {@inheritDoc}
*/
public String listSnapshots() {
String result = "ID FROM ROWID TO ROWID TIME\n";
result += "------------------------------------------------\n";
Connection con = null;
Statement s = null;
ResultSet r = null;
try {
con = getConnection();
s = con.createStatement();
r = JdbcUtil.executeQuery(s, "SELECT id,firstid,lastid,tmstmp FROM snapshots");
for(boolean rc = r.next(); rc; rc = r.next()) {
int id = r.getInt(1);
int firstid = r.getInt(2);
int lastid = r.getInt(3);
long tmstmp = r.getLong(4);
Date tm = new Date(tmstmp);
result += id + " " + firstid + " " + lastid + " " + tm + "\n";
}
}
catch (SQLException e) {
throw new ConnectorBackendException(e);
}
finally {
try {
if(r != null)
r.close();
if (s != null)
s.close();
}
catch (SQLException ee) {
l.warn("Error closing stuff: " + ee.getMessage(), ee);
}
}
l.debug("Current snapshots: \n"+result);
return result;
}
/**
* {@inheritDoc}
*/
public int getLastSnapshotId() {
Connection con = null;
Statement s = null;
ResultSet r = null;
try {
con = getConnection();
s = con.createStatement();
r = s.executeQuery("SELECT MAX(id) FROM snapshots");
for(boolean rc = r.next(); rc; rc = r.next()) {
int id = r.getInt(1);
l.debug("Last snapshot is "+id);
return id;
}
}
catch (SQLException e) {
throw new InternalErrorException(e.getMessage());
}
finally {
try {
if(r != null)
r.close();
if(s != null)
s.close();
}
catch (SQLException ee) {
ee.printStackTrace();
}
}
throw new InternalErrorException("Can't retrieve the last snapshot number.");
}
/**
* {@inheritDoc}
*/
public boolean isInitialized() {
return exists("snapshots");
}
/**
* {@inheritDoc}
*/
public boolean exists(String tbl) {
Connection con = null;
try {
con = getConnection();
return exists(con, tbl);
}
catch (SQLException e) {
throw new InternalErrorException(e);
}
}
protected abstract Connection getConnection() throws SQLException;
/**
* {@inheritDoc}
*/
protected void initializeLocalProject() {
try {
l.debug("Executing system DDL SQL.");
Connection c = getConnection();
createSnapshotTable(c);
createFunctions(c);
l.debug("System DDL SQL execution finished.");
} catch (SQLException e) {
throw new ConnectorBackendException(e);
}
}
/**
* {@inheritDoc}
*/
protected void initializeLocalDataSet(PdmSchema schema) {
l.debug("Executing DDL SQL.");
try {
Connection c = getConnection();
for(PdmTable table : schema.getTables()) {
if (!exists(c, table.getName())) {
createTable(c, table);
if (PdmTable.PDM_TABLE_TYPE_LOOKUP.equals(table.getType())) {
prepopulateLookupTable(c, table);
} else if (PdmTable.PDM_TABLE_TYPE_CONNECTION_POINT.equals(table.getType())) {
final List<Map<String,String>> rows = prepareInitialTableLoad(table);
if (!rows.isEmpty()) {
l.warn("Prepopulating of connection point tables is not suppported (table = " + table.getName() + ")");
}
}
/*
if(PdmTable.PDM_TABLE_TYPE_SOURCE.equals(table.getType()))
indexAllTableColumns(c, table);
*/
} else {
for (PdmColumn column : table.getColumns()) {
if (!exists(c, table.getName(), column.getName())) {
addColumn(c, table, column);
/*
if (PdmTable.PDM_TABLE_TYPE_SOURCE.equals(table.getType()))
indexTableColumn(c, table, column);
*/
}
}
}
}
JdbcUtil.executeUpdate(c,
"INSERT INTO snapshots(name,firstid,lastid,tmstmp) VALUES ('" + schema.getFactTable().getName() + "',0,0,0)"
);
l.debug("DDL SQL Execution finished.");
} catch (SQLException e) {
throw new ConnectorBackendException(e);
}
}
/**
* {@inheritDoc}
*/
protected void createSnowflake(PdmSchema schema) {
try {
Connection c = getConnection();
l.debug("Executing data normalization SQL.");
//populate REFERENCEs lookups from the referenced lookups
l.debug("Executing referenced lookups replication.");
executeLookupReplicationSql(c, schema);
l.debug("Finished referenced lookups replication.");
l.debug("Executing lookup tables population.");
populateLookupTables(c, schema);
l.debug("Finished lookup tables population.");
l.debug("Executing connection point tables population.");
populateConnectionPointTables(c, schema);
l.debug("FInished connection point tables population.");
// nothing for the reference columns
l.debug("Inserting partial snapshot record.");
insertSnapshotsRecord(c, schema);
l.debug("Executing fact table population.");
insertFactsToFactTable(c, schema);
l.debug("FInished fact table population.");
l.debug("Executing fact table FK generation.");
updateFactTableFk(c, schema);
l.debug("Finished fact table FK generation.");
updateSnapshotsRecord(c, schema);
l.debug("Snapshot record updated.");
l.debug("Finished data normalization SQL.");
} catch (SQLException e) {
throw new ConnectorBackendException(e);
}
}
/**
* {@inheritDoc}
*/
protected void executeLookupReplicationSql(Connection c, PdmSchema schema) throws SQLException {
for (PdmLookupReplication lr : schema.getLookupReplications()) {
JdbcUtil.executeUpdate(c,
"DELETE FROM " + lr.getReferencingLookup()
);
JdbcUtil.executeUpdate(c,
"INSERT INTO " + lr.getReferencingLookup() + "("+N.ID+"," + lr.getReferencingColumn() +","+N.HSH+")" +
" SELECT "+ N.ID+"," + lr.getReferencedColumn() + "," + lr.getReferencedColumn() + " FROM " +
lr.getReferencedLookup()
);
}
}
/**
* {@inheritDoc}
* @throws SQLException
*/
protected boolean exists(Connection c, String tbl) throws SQLException {
DatabaseMetaData md = c.getMetaData();
ResultSet rs = md.getTables(null, null, tbl, null);
try {
return rs.next();
} finally {
if (rs != null)
rs.close();
}
}
/**
* Returns true if the specified column of the specified table exists in the DB. Case sensitive!
* @param tbl table name
* @param col column name
     * @return true if the column exists, false otherwise
     * @throws IllegalArgumentException if the required table does not exist
     * @throws SQLException if another database-related problem occurs
*/
protected boolean exists(Connection c, String tbl, String col) throws SQLException {
if (!exists(c, tbl))
throw new IllegalArgumentException("Table '" + tbl + "' does not exist.");
String sql = "SELECT * FROM " + tbl + " WHERE 1=0";
Statement st = c.createStatement();
try {
ResultSet rs = st.executeQuery(sql);
try {
ResultSetMetaData md = rs.getMetaData();
int cols = md.getColumnCount();
for (int i = 1; i <= cols; i++) {
if (col.equals(md.getColumnName(i)))
return true;
}
return false;
} finally {
if (rs != null)
rs.close();
}
} finally {
if (st != null)
st.close();
}
}
/**
* Indexes all table columns
* @param c JDBC connection
* @param table target table
* @throws SQLException in case of SQL issues
*/
protected void indexAllTableColumns(Connection c, PdmTable table) throws SQLException {
for( PdmColumn column : table.getColumns()) {
indexTableColumn(c, table, column);
}
}
/**
* Indexes table's column
* @param c JDBC connection
* @param table target table
* @param column target table column
* @throws SQLException in case of SQL issues
*/
private void indexTableColumn(Connection c, PdmTable table, PdmColumn column) throws SQLException {
if(!column.isPrimaryKey() && !column.isUnique()) {
JdbcUtil.executeUpdate(c,"CREATE INDEX idx_" + table.getName()
+ "_" + column.getName()
+ " ON " + table.getName() + "("+column.getName()+")");
}
}
/**
* Creates a new table
* @param c JDBC connection
* @param table target table
* @throws SQLException in case of SQL issues
*/
protected void createTable(Connection c, PdmTable table) throws SQLException {
String pk = "";
String sql = "CREATE TABLE " + table.getName() + " (\n";
for( PdmColumn column : table.getColumns()) {
sql += " "+ column.getName() + " " + column.getType();
if(column.isUnique())
sql += " UNIQUE";
if(column.isAutoIncrement())
sql += " " + SYNTAX_AUTOINCREMENT;
if(column.isPrimaryKey())
if(pk != null && pk.length() > 0)
pk += "," + column.getName();
else
pk += column.getName();
sql += ",";
}
sql += " PRIMARY KEY (" + pk + "))";
JdbcUtil.executeUpdate(c, sql);
for( PdmColumn column : table.getColumns()) {
if(column.isNonUniqueIndexed()) {
indexTableColumn(c, table, column);
}
}
}
/**
* Fills the lookup table with the DISTINCT values from the source table
* @param c JDBC connection
* @param table target lookup table
* @throws SQLException in case of SQL issues
*/
private void prepopulateLookupTable(Connection c, PdmTable table) throws SQLException {
final List<Map<String,String>> rows = prepareInitialTableLoad(table);
if (rows.isEmpty())
return;
// create the list to make sure consistent keys order in the following loop
final List<String> columns = new ArrayList<String>(rows.get(0).keySet());
final String placeholders = StringUtil.join(", ", columns, "?");
for (final Map<String,String> row : rows) {
final String sql = "INSERT INTO " + table.getName() + " ("
+ N.HSH + ", " + StringUtil.join(", ", columns)
+ ") VALUES (?, " + placeholders + ")";
JdbcUtil.executeUpdate(c, sql, new StatementHandler() {
public void prepare(PreparedStatement stmt) throws SQLException {
boolean first = true;
final StringBuffer hashbf = new StringBuffer();
int index = 2;
for (final String col : columns) {
if (first)
first = false;
else
hashbf.append(HASH_SEPARATOR);
hashbf.append(row.get(col));
stmt.setString(index++, row.get(col));
}
stmt.setString(1, hashbf.toString());
}
});
}
}
/**
* Add column to the table (ALTER TABLE)
* @param c JDBC connection
* @param table target table
* @param column target column
* @throws SQLException in case of SQL issues
*/
private void addColumn(Connection c, PdmTable table, PdmColumn column) throws SQLException {
String sql = "ALTER TABLE " + table.getName() + " ADD COLUMN "
+ column.getName() + " " + column.getType();
if (column.isUnique())
sql += " UNIQUE";
JdbcUtil.executeUpdate(c, sql);
}
/**
* Creates the system snapshots table
* @param c JDBC connection
* @throws SQLException in case of a DB issue
*/
protected void createSnapshotTable(Connection c) throws SQLException {
JdbcUtil.executeUpdate(c,
"CREATE TABLE snapshots (" +
" id INT " + SYNTAX_AUTOINCREMENT + "," +
" name VARCHAR(255)," +
" tmstmp BIGINT," +
" firstid INT," +
" lastid INT," +
" PRIMARY KEY (id)" +
")"
);
}
/**
* Inserts new records to the snapshots table before the load
* @param c JDBC connection
* @param schema PDM schema
* @throws SQLException in case of a DB issue
*/
protected void insertSnapshotsRecord(Connection c, PdmSchema schema) throws SQLException {
PdmTable factTable = schema.getFactTable();
String fact = factTable.getName();
Date dt = new Date();
{
final String sql1 = "INSERT INTO snapshots(name,tmstmp,firstid) SELECT '"+fact+"',"+dt.getTime()+",MAX("+N.ID+")+1 FROM " + fact;
JdbcUtil.executeUpdate(c, sql1);
}
{
// compensate for the fact that MAX returns NULL when there are no rows in the SELECT
final String sql2 = "UPDATE snapshots SET firstid = 0 WHERE name = '"+fact+"' AND firstid IS NULL";
JdbcUtil.executeUpdate(c, sql2);
}
}
/**
* Updates the snapshots table after load
* @param c JDBC connection
* @param schema PDM schema
* @throws SQLException in case of a DB issue
*/
protected void updateSnapshotsRecord(Connection c, PdmSchema schema) throws SQLException {
PdmTable factTable = schema.getFactTable();
String fact = factTable.getName();
JdbcUtil.executeUpdate(c,
"UPDATE snapshots SET lastid = (SELECT MAX("+N.ID+") FROM " + fact + ") WHERE name = '" +
fact + "' AND lastid IS NULL"
);
// compensate for the fact that MAX returns NULL when there are no rows in the SELECT
JdbcUtil.executeUpdate(c,
"UPDATE snapshots SET lastid = 0 WHERE name = '" + fact + "' AND lastid IS NULL"
);
}
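/*
 * Note on the two methods above: both derive an id from MAX() and then run a
 * compensating UPDATE, because MAX() over zero rows yields NULL. A hedged
 * sketch of the same idea as a reusable helper -- the method name and the
 * snapshotCol parameter are illustrative, not part of this class:
 *
 * private void setFromMaxOrZero(Connection c, String snapshotCol, String fact)
 *         throws SQLException {
 *     JdbcUtil.executeUpdate(c, "UPDATE snapshots SET " + snapshotCol +
 *         " = (SELECT MAX(" + N.ID + ") FROM " + fact + ") WHERE name = '" +
 *         fact + "' AND " + snapshotCol + " IS NULL");
 *     // MAX() on an empty table is NULL, so fall back to 0 explicitly
 *     JdbcUtil.executeUpdate(c, "UPDATE snapshots SET " + snapshotCol +
 *         " = 0 WHERE name = '" + fact + "' AND " + snapshotCol + " IS NULL");
 * }
 */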
/**
* Inserts rows from the source table to the fact table
* @param c JDBC connection
* @param schema PDM schema
* @throws SQLException in case of a DB issue
*/
protected abstract void insertFactsToFactTable(Connection c, PdmSchema schema) throws SQLException;
protected void populateLookupTables(Connection c, PdmSchema schema) throws SQLException {
for(PdmTable lookupTable : schema.getLookupTables()) {
populateLookupTable(c, lookupTable, schema);
}
}
/**
* Populates the connection point tables
* @param c JDBC connection
* @param schema PDM schema
* @throws SQLException in case of a DB issue
*/
protected void populateConnectionPointTables(Connection c, PdmSchema schema) throws SQLException {
for(PdmTable cpTable : schema.getConnectionPointTables())
populateConnectionPointTable(c, cpTable, schema);
}
private void updateFactTableFk(Connection c, PdmSchema schema) throws SQLException {
String fact = schema.getFactTable().getName();
String updateStatement = "";
List<PdmTable> tables = new ArrayList<PdmTable>();
tables.addAll(schema.getLookupTables());
tables.addAll(schema.getConnectionPointTables());
tables.addAll(schema.getReferenceTables());
for(PdmTable tbl : tables) {
if(updateStatement.length() > 0)
updateStatement += " , " + generateFactUpdateSetStatement(tbl, schema);
else
updateStatement += generateFactUpdateSetStatement(tbl, schema);
}
if(updateStatement.length()>0) {
updateStatement = "UPDATE " + fact + " SET " + updateStatement +
" WHERE "+N.ID+" > "+getLastId(c,fact);
JdbcUtil.executeUpdate(c, updateStatement);
}
}
/**
* Generates the UPDATE SET statement for individual lookup FK
* @param lookupTable lookup table
* @param schema PDM schema
* @return the column update clause
*/
protected String generateFactUpdateSetStatement(PdmTable lookupTable, PdmSchema schema) {
String lookup = lookupTable.getName();
String fact = schema.getFactTable().getName();
String source = schema.getSourceTable().getName();
String associatedSourceColumns = concatAssociatedSourceColumns(lookupTable);
return lookupTable.getAssociatedSourceColumn() + "_"+N.ID+" = (SELECT "+N.ID+" FROM " +
lookup + " d," + source + " o WHERE " + associatedSourceColumns + " = d."+N.HSH+" AND o."+N.SRC_ID+"= " +
fact + "."+N.ID+") ";
}
/**
* Populates lookup table
* @param c JDBC connection
* @param lookupTable lookup table
* @param schema PDM schema
* @throws SQLException in case of a DB issue
*/
protected void populateLookupTable(Connection c, PdmTable lookupTable, PdmSchema schema) throws SQLException {
String lookup = lookupTable.getName();
String fact = schema.getFactTable().getName();
String source = schema.getSourceTable().getName();
String insertColumns = N.HSH+"," + getInsertColumns(lookupTable);
String associatedSourceColumns = getAssociatedSourceColumns(lookupTable);
String concatAssociatedSourceColumns = concatAssociatedSourceColumns(lookupTable);
String nestedSelectColumns = concatAssociatedSourceColumns+","+associatedSourceColumns;
JdbcUtil.executeUpdate(c,
"INSERT INTO " + lookup + "(" + insertColumns +
") SELECT DISTINCT " + nestedSelectColumns + " FROM " + source +
" WHERE "+N.SRC_ID+" > "+getLastId(c,fact)
);
JdbcUtil.executeUpdate(c,
"CREATE TABLE delete_ids("+N.HSH+" "+PdmColumn.PDM_COLUMN_TYPE_LONG_TEXT+", "+N.ID+" INT, PRIMARY KEY(id))"
);
JdbcUtil.executeUpdate(c,
"INSERT INTO delete_ids SELECT "+N.HSH+",max("+N.ID+") FROM "+lookup+
" GROUP by "+N.HSH+" HAVING count("+N.ID+") > 1"
);
JdbcUtil.executeUpdate(c,
"DELETE FROM "+lookup+" WHERE "+N.ID+" IN (SELECT "+N.ID+" FROM delete_ids)"
);
JdbcUtil.executeUpdate(c,
"DROP TABLE delete_ids"
);
}
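/*
 * The delete_ids sequence above is a portable de-duplication idiom: for every
 * hash value that occurs more than once, collect the duplicate row's id into a
 * scratch table, delete those rows from the lookup, and drop the scratch
 * table. A hedged sketch of the same steps as a standalone helper (the method
 * name is illustrative; the column names reuse the N constants):
 *
 * private void removeDuplicateHashes(Connection c, String lookup) throws SQLException {
 *     JdbcUtil.executeUpdate(c, "CREATE TABLE delete_ids(" + N.HSH + " " +
 *         PdmColumn.PDM_COLUMN_TYPE_LONG_TEXT + ", " + N.ID + " INT, PRIMARY KEY(id))");
 *     JdbcUtil.executeUpdate(c, "INSERT INTO delete_ids SELECT " + N.HSH +
 *         ",max(" + N.ID + ") FROM " + lookup +
 *         " GROUP BY " + N.HSH + " HAVING count(" + N.ID + ") > 1");
 *     JdbcUtil.executeUpdate(c, "DELETE FROM " + lookup + " WHERE " + N.ID +
 *         " IN (SELECT " + N.ID + " FROM delete_ids)");
 *     JdbcUtil.executeUpdate(c, "DROP TABLE delete_ids");
 * }
 */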
/**
* Gets the last id in the snapshot table
* @param c JDBC connection
* @param factTable the fact table name
* @return the last id
*/
public int getLastId(Connection c, String factTable) {
Statement s = null;
ResultSet r = null;
try {
s = c.createStatement();
r = s.executeQuery("SELECT MAX(lastid) FROM snapshots WHERE name='" + factTable +"'");
for(boolean rc = r.next(); rc; rc = r.next()) {
int id = r.getInt(1);
l.debug("Last is is "+id);
return id;
}
}
catch (SQLException e) {
throw new InternalErrorException(e.getMessage());
}
finally {
try {
if(r != null)
r.close();
if(s != null)
s.close();
}
catch (SQLException ee) {
ee.printStackTrace();
}
}
throw new InternalErrorException("Can't retrieve the last id number.");
}
/**
* Populates connection point table
* @param c JDBC connection
* @param lookupTable connection point table
* @param schema PDM schema
* @throws SQLException in case of a DB issue
*/
protected void populateConnectionPointTable(Connection c, PdmTable lookupTable, PdmSchema schema) throws SQLException {
String lookup = lookupTable.getName();
String fact = schema.getFactTable().getName();
String source = schema.getSourceTable().getName();
String insertColumns = N.ID+","+N.HSH+"," + getInsertColumns(lookupTable);
String associatedSourceColumns = getAssociatedSourceColumns(lookupTable);
String concatAssociatedSourceColumns = concatAssociatedSourceColumns(lookupTable);
String nestedSelectColumns = N.SRC_ID+","+concatAssociatedSourceColumns+","+associatedSourceColumns;
/*
JdbcUtil.executeUpdate(c,
"INSERT INTO " + lookup + "(" + insertColumns + ") SELECT DISTINCT " + nestedSelectColumns +
" FROM " + source + " WHERE "+N.SRC_ID+" > (SELECT MAX(lastid) FROM snapshots WHERE name='" + fact +
"') AND " + associatedSourceColumns + " NOT IN (SELECT "+N.HSH+" FROM " + lookup + ")"
);
*/
// TODO: when snapshotting, there are duplicate CONNECTION POINT VALUES
// we need to decide if we want to accumulate the connection point lookup or not
/*
JdbcUtil.executeUpdate(c,
"INSERT INTO " + lookup + "(" + insertColumns + ") SELECT DISTINCT " + nestedSelectColumns +
" FROM " + source + " WHERE "+N.SRC_ID+" > (SELECT MAX(lastid) FROM snapshots WHERE name='" + fact +"')"
);
*/
JdbcUtil.executeUpdate(c,
"INSERT INTO " + lookup + "(" + insertColumns +
") SELECT DISTINCT " + nestedSelectColumns + " FROM " + source +
" WHERE "+N.SRC_ID+" > "+getLastId(c,fact)
);
JdbcUtil.executeUpdate(c,
"CREATE TABLE delete_ids("+N.HSH+" "+PdmColumn.PDM_COLUMN_TYPE_LONG_TEXT+", "+N.ID+" INT, PRIMARY KEY(id))"
);
JdbcUtil.executeUpdate(c,
"INSERT INTO delete_ids SELECT "+N.HSH+",max("+N.ID+") FROM "+lookup+
" GROUP by "+N.HSH+" HAVING count("+N.ID+") > 1"
);
JdbcUtil.executeUpdate(c,
"DELETE FROM "+lookup+" WHERE "+N.ID+" IN (SELECT "+N.ID+" FROM delete_ids)"
);
JdbcUtil.executeUpdate(c,
"DROP TABLE delete_ids"
);
}
/**
* Concatenates the associated source columns with a DBMS-specific concat method.
* The concatenated columns are used as a unique key (hash id) of each lookup row
* @param lookupTable lookup table
* @return the concatenated columns as String
*/
protected String concatAssociatedSourceColumns(PdmTable lookupTable) {
String associatedColumns = "";
for(PdmColumn column : lookupTable.getAssociatedColumns()) {
// if there are LABELS, the lookup can't be added twice to the FROM clause
if(associatedColumns.length() > 0)
associatedColumns += SYNTAX_CONCAT_OPERATOR + column.getSourceColumn();
else
associatedColumns = column.getSourceColumn();
}
associatedColumns = SYNTAX_CONCAT_FUNCTION_PREFIX + associatedColumns + SYNTAX_CONCAT_FUNCTION_SUFFIX;
return associatedColumns;
}
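/*
 * For example, with associated source columns first_name and last_name, a
 * MySQL-flavoured backend defining SYNTAX_CONCAT_FUNCTION_PREFIX = "CONCAT(",
 * SYNTAX_CONCAT_OPERATOR = "," and SYNTAX_CONCAT_FUNCTION_SUFFIX = ")" would
 * return CONCAT(first_name,last_name). The concrete prefix/operator/suffix
 * values used here are assumptions; the real ones come from the DBMS-specific
 * subclass.
 */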
/**
* Gets all columns that will be inserted (excludes autoincrement columns)
* @param lookupTable lookup table
* @return all columns eligible for insert
*/
protected String getInsertColumns(PdmTable lookupTable) {
String insertColumns = "";
for(PdmColumn column : lookupTable.getAssociatedColumns()) {
if(insertColumns.length() > 0)
insertColumns += "," + column.getName();
else
insertColumns += column.getName();
}
return insertColumns;
}
/**
* Returns the associated columns in the source table
* @param lookupTable lookup table
* @return list of associated source columns
*/
protected String getAssociatedSourceColumns(PdmTable lookupTable) {
String sourceColumns = "";
for(PdmColumn column : lookupTable.getAssociatedColumns()) {
if(sourceColumns.length() > 0)
sourceColumns += "," + column.getSourceColumn();
else
sourceColumns += column.getSourceColumn();
}
return sourceColumns;
}
/**
* Returns non-autoincrement columns
* @param tbl table
* @return non-autoincrement columns
*/
protected String getNonAutoincrementColumns(PdmTable tbl) {
String cols = "";
for (PdmColumn col : tbl.getColumns()) {
String cn = col.getName();
if(!col.isAutoIncrement())
if (cols != null && cols.length() > 0)
cols += "," + cn;
else
cols += cn;
}
return cols;
}
/**
* Returns non-autoincrement columns count
* @param tbl table
* @return non-autoincrement columns count
*/
protected int getNonAutoincrementColumnsCount(PdmTable tbl) {
int cnt =0;
for (PdmColumn col : tbl.getColumns())
if(!col.isAutoIncrement())
cnt++;
return cnt;
}
/**
* Returns the prepared statement question marks
* @param tbl table
* @return comma-separated question-mark placeholders
*/
protected String getPreparedStatementQuestionMarks(PdmTable tbl) {
String cols = "";
for (PdmColumn col : tbl.getColumns()) {
if(!col.isAutoIncrement())
if (cols != null && cols.length() > 0)
cols += ",?";
else
cols += "?";
}
return cols;
}
/**
* Generates the where clause for unloading data to CSVs in the data loading package
* @param part DLI part
* @param schema PDM schema
* @param snapshotIds ids of snapshots to unload
* @return SQL where clause
*/
protected String getLoadWhereClause(DLIPart part, PdmSchema schema, int[] snapshotIds) {
String dliTable = getTableNameFromPart(part);
PdmTable pdmTable = schema.getTableByName(dliTable);
String whereClause = "";
if(PdmTable.PDM_TABLE_TYPE_FACT.equals(pdmTable.getType()) && snapshotIds != null && snapshotIds.length > 0) {
String inClause = "";
for(int i : snapshotIds) {
if(inClause.length()>0)
inClause += ","+i;
else
inClause = "" + i;
}
whereClause = ",snapshots WHERE " + dliTable +
".ID BETWEEN snapshots.firstid and snapshots.lastid AND snapshots.id IN (" + inClause + ")";
}
return whereClause;
}
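/*
 * For example, for a fact table named dli_fact and snapshotIds {1,3} this
 * returns (table name illustrative):
 * ",snapshots WHERE dli_fact.ID BETWEEN snapshots.firstid and
 * snapshots.lastid AND snapshots.id IN (1,3)", which the caller appends after
 * "SELECT ... FROM dli_fact".
 */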
/**
* Generates the list of columns for unloading data to CSVs in the data loading package
* @param part DLI part
* @param schema PDM schema
* @return list of columns
*/
protected String getLoadColumns(DLIPart part, PdmSchema schema) {
String dliTable = getTableNameFromPart(part);
PdmTable pdmTable = schema.getTableByName(dliTable);
List<Column> columns = part.getColumns();
String cols = "";
for (Column cl : columns) {
PdmColumn col = null;
if (isPrimaryKey(cl)) {
col = pdmTable.getConnectionPointReferenceColumn();
}
if (col == null) {
col = pdmTable.getColumnByName(cl.getName());
}
// fact table fact columns
if(PdmTable.PDM_TABLE_TYPE_FACT.equals(pdmTable.getType()) &&
SourceColumn.LDM_TYPE_FACT.equals(col.getLdmTypeReference()))
cols = decorateFactColumnForLoad(cols, col, dliTable);
// lookup table name column
else if (PdmTable.PDM_TABLE_TYPE_LOOKUP.equals(pdmTable.getType()) &&
SourceColumn.LDM_TYPE_ATTRIBUTE.equals(col.getLdmTypeReference()))
cols = decorateLookupColumnForLoad(cols, col, dliTable);
else
cols = decorateOtherColumnForLoad(cols, col, dliTable);
}
return cols;
}
/**
* Returns the CSV header of the loaded data
* @param part DLI part
* @return array of column names
*/
protected String[] getLoadHeader(DLIPart part) {
List<Column> cols = part.getColumns();
String[] header = new String[cols.size()];
for(int i=0; i<cols.size(); i++) {
header[i] = cols.get(i).getName();
}
return header;
}
protected static boolean isPrimaryKey(Column cl) {
if (cl.getConstraints() == null)
return false;
return cl.getConstraints().matches("(?i).*PRIMARY *KEY.*");
}
/**
* Uses DBMS specific functions for decorating fact columns for unloading from DB to CSV
* @param cols column list
* @param cl column to add to cols
* @param table table name
* @return the amended list
*/
protected String decorateFactColumnForLoad(String cols, PdmColumn cl, String table) {
return decorateOtherColumnForLoad(cols, cl, table);
}
/**
* Uses DBMS specific functions for decorating lookup columns for unloading from DB to CSV
* @param cols column list
* @param cl column to add to cols
* @param table table name
* @return the amended list
*/
protected String decorateLookupColumnForLoad(String cols, PdmColumn cl, String table) {
return decorateOtherColumnForLoad(cols, cl, table);
}
/**
* Uses DBMS specific functions for decorating generic columns for unloading from DB to CSV
* @param cols column list
* @param cl column to add to cols
* @param table table name
* @return the amended list
*/
protected String decorateOtherColumnForLoad(String cols, PdmColumn cl, String table) {
if (cols != null && cols.length() > 0)
cols += "," + table + "." + StringUtil.toIdentifier(cl.getName());
else
cols += table + "." + StringUtil.toIdentifier(cl.getName());
return cols;
}
public void close() {
try {
if (connection != null && !connection.isClosed()) {
connection.close();
}
} catch (SQLException e) {
throw new InternalErrorException(e);
}
}
/**
* Gets the table name from a DLI part
* @param part DLI part
* @return table name
*/
protected String getTableNameFromPart(DLIPart part) {
return StringUtil.toIdentifier(part.getFileName().split("\\.")[0]);
}
/**
* Creates the DBMS specific system functions
* @param c JDBC connection
* @throws SQLException in case of DB issues
*/
protected abstract void createFunctions(Connection c) throws SQLException;
/**
* {@inheritDoc}
*/
public String getUsername() {
return username;
}
/**
* {@inheritDoc}
*/
public void setUsername(String username) {
this.username = username;
}
/**
* {@inheritDoc}
*/
public String getPassword() {
return password;
}
/**
* {@inheritDoc}
*/
public void setPassword(String password) {
this.password = password;
}
/**
* {@inheritDoc}
*/
public String getProjectId() {
return projectId;
}
/**
* {@inheritDoc}
*/
public void setProjectId(String projectId) {
this.projectId = projectId;
}
/**
* {@inheritDoc}
*/
public void executeExtract(PdmSchema schema, String file, boolean hasHeader) {
Connection c = null;
PreparedStatement s = null;
try {
c = getConnection();
l.debug("Extracting data.");
PdmTable sourceTable = schema.getSourceTable();
String source = sourceTable.getName();
String cols = getNonAutoincrementColumns(sourceTable);
String qmrks = getPreparedStatementQuestionMarks(sourceTable);
int cnt = getNonAutoincrementColumnsCount(sourceTable);
CSVReader csvIn = new CSVReader(FileUtil.createBufferedUtf8Reader(file));
String[] nextLine;
int rowCnt = 0;
while ((nextLine = csvIn.readNext()) != null) {
rowCnt++;
if(hasHeader)
hasHeader = false;
else {
if(s == null) {
s = c.prepareStatement("INSERT INTO "+source+"("+cols+") VALUES ("+qmrks+")");
}
if(nextLine.length == cnt) {
for(int i=1; i<=nextLine.length; i++)
if(nextLine[i-1]!=null)
- s.setString(i,nextLine[i-1].substring(0,Constants.LABEL_MAX_LENGTH));
+ s.setString(i,nextLine[i-1].substring(0,Math.min(Constants.LABEL_MAX_LENGTH,nextLine[i-1].length())));
else
s.setString(i,"");
s.addBatch();
}
else {
l.warn("Skipping row "+file+":"+rowCnt+" as it has "+nextLine.length+
" columns. Expecting "+cnt+" columns.");
}
if(rowCnt % BATCH_SIZE == 0) {
s.executeBatch();
s.close();
s = null;
}
}
}
if(s != null) {
s.executeBatch();
}
l.debug("Finished extracting data.");
} catch (SQLException e) {
throw new ConnectorBackendException(e);
} catch (IOException e) {
throw new ConnectorBackendException(e);
}
finally {
try {
if(s != null)
s.close();
}
catch (SQLException e) {
// nothing we can do
}
}
}
/**
* {@inheritDoc}
*/
public void executeLoad(PdmSchema schema, DLIPart part, String dir, int[] snapshotIds) {
Connection c = null;
try {
c = getConnection();
l.debug("Unloading data.");
final String file = dir + System.getProperty("file.separator") + part.getFileName();
String cols = getLoadColumns(part, schema);
String whereClause = getLoadWhereClause(part, schema, snapshotIds);
String dliTable = getTableNameFromPart(part);
String sql = "SELECT " + cols + " FROM " + dliTable.toUpperCase() + whereClause;
String[] header = getLoadHeader(part);
CSVWriter cw = FileUtil.createUtf8CsvWriter(new File(file));
cw.writeNext(header);
JdbcUtil.executeQuery(c, sql, new ResultSetCsvWriter(cw));
cw.close();
l.debug("Finished unloading data.");
} catch (SQLException e) {
throw new ConnectorBackendException(e);
} catch (IOException e) {
throw new ConnectorBackendException(e);
}
}
private static class ResultSetCsvWriter implements JdbcUtil.ResultSetHandler {
private final CSVWriter cw;
protected int rowCnt = 0;
public ResultSetCsvWriter(CSVWriter cw) {
this.cw = cw;
}
public void handle(ResultSet rs) throws SQLException {
final int length = rs.getMetaData().getColumnCount();
final String[] line = new String[length];
for (int i = 1; i <= length; i++)
line[i - 1] = rs.getString(i);
cw.writeNext(line, true);
}
}
}
| true | true | public void executeExtract(PdmSchema schema, String file, boolean hasHeader) {
Connection c = null;
PreparedStatement s = null;
try {
c = getConnection();
l.debug("Extracting data.");
PdmTable sourceTable = schema.getSourceTable();
String source = sourceTable.getName();
String cols = getNonAutoincrementColumns(sourceTable);
String qmrks = getPreparedStatementQuestionMarks(sourceTable);
int cnt = getNonAutoincrementColumnsCount(sourceTable);
CSVReader csvIn = new CSVReader(FileUtil.createBufferedUtf8Reader(file));
String[] nextLine;
int rowCnt = 0;
while ((nextLine = csvIn.readNext()) != null) {
rowCnt++;
if(hasHeader)
hasHeader = false;
else {
if(s == null) {
s = c.prepareStatement("INSERT INTO "+source+"("+cols+") VALUES ("+qmrks+")");
}
if(nextLine.length == cnt) {
for(int i=1; i<=nextLine.length; i++)
if(nextLine[i-1]!=null)
s.setString(i,nextLine[i-1].substring(0,Constants.LABEL_MAX_LENGTH));
else
s.setString(i,"");
s.addBatch();
}
else {
l.warn("Skipping row "+file+":"+rowCnt+" as it has "+nextLine.length+
" columns. Expecting "+cnt+" columns.");
}
if(rowCnt % BATCH_SIZE == 0) {
s.executeBatch();
s.close();
s = null;
}
}
}
if(s != null) {
s.executeBatch();
}
l.debug("Finished extracting data.");
} catch (SQLException e) {
throw new ConnectorBackendException(e);
} catch (IOException e) {
throw new ConnectorBackendException(e);
}
finally {
try {
if(s != null)
s.close();
}
catch (SQLException e) {
// nothing we can do
}
}
}
| public void executeExtract(PdmSchema schema, String file, boolean hasHeader) {
Connection c = null;
PreparedStatement s = null;
try {
c = getConnection();
l.debug("Extracting data.");
PdmTable sourceTable = schema.getSourceTable();
String source = sourceTable.getName();
String cols = getNonAutoincrementColumns(sourceTable);
String qmrks = getPreparedStatementQuestionMarks(sourceTable);
int cnt = getNonAutoincrementColumnsCount(sourceTable);
CSVReader csvIn = new CSVReader(FileUtil.createBufferedUtf8Reader(file));
String[] nextLine;
int rowCnt = 0;
while ((nextLine = csvIn.readNext()) != null) {
rowCnt++;
if(hasHeader)
hasHeader = false;
else {
if(s == null) {
s = c.prepareStatement("INSERT INTO "+source+"("+cols+") VALUES ("+qmrks+")");
}
if(nextLine.length == cnt) {
for(int i=1; i<=nextLine.length; i++)
if(nextLine[i-1]!=null)
s.setString(i,nextLine[i-1].substring(0,Math.min(Constants.LABEL_MAX_LENGTH,nextLine[i-1].length())));
else
s.setString(i,"");
s.addBatch();
}
else {
l.warn("Skipping row "+file+":"+rowCnt+" as it has "+nextLine.length+
" columns. Expecting "+cnt+" columns.");
}
if(rowCnt % BATCH_SIZE == 0) {
s.executeBatch();
s.close();
s = null;
}
}
}
if(s != null) {
s.executeBatch();
}
l.debug("Finished extracting data.");
} catch (SQLException e) {
throw new ConnectorBackendException(e);
} catch (IOException e) {
throw new ConnectorBackendException(e);
}
finally {
try {
if(s != null)
s.close();
}
catch (SQLException e) {
// nothing we can do
}
}
}
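The buggy/fixed pair above differs only in the truncation guard: calling substring(0, Constants.LABEL_MAX_LENGTH) throws StringIndexOutOfBoundsException whenever a field is shorter than the limit, and the fix bounds the cut with Math.min. A minimal standalone sketch of the same guard (class and method names are illustrative):

public final class TruncateSketch {
    // Truncate to at most maxLength characters, tolerating short and null inputs.
    static String truncate(String value, int maxLength) {
        if (value == null) {
            return "";
        }
        return value.substring(0, Math.min(maxLength, value.length()));
    }

    public static void main(String[] args) {
        System.out.println(truncate("abcdef", 3)); // prints "abc"
        System.out.println(truncate("ab", 3));     // prints "ab", no exception
    }
}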
|
diff --git a/crono/src/crono/Interpreter.java b/crono/src/crono/Interpreter.java
index 64f0d32..a3f798d 100644
--- a/crono/src/crono/Interpreter.java
+++ b/crono/src/crono/Interpreter.java
@@ -1,238 +1,238 @@
package crono;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Stack;
import crono.type.Atom;
import crono.type.Cons;
import crono.type.CronoType;
import crono.type.Function;
import crono.type.LambdaFunction;
import crono.type.Nil;
import crono.type.Quote;
import crono.type.Symbol;
import crono.type.CronoTypeId;
import crono.type.TypeId;
public class Interpreter extends Visitor {
private static final String _scope_err = "No object %s in scope";
private static final String _too_many_args =
"Too many arguments to %s: %d/%d recieved";
private static final String _type_scope_err = "No type %s in scope";
private static final String _type_mismatch =
"Function '%s' expected arguments %s; got %s";
public boolean show_env;
public boolean show_closure;
public boolean dprint_enable;
public boolean dprint_ident;
public boolean dynamic;
protected int indent_level;
protected Function.EvalType eval;
public Interpreter() {
show_env = false;
show_closure = false;
dprint_enable = false;
dprint_ident = true;
dynamic = false;
indent_level = 0;
eval = Function.EvalType.FULL;
env_stack = new Stack<Environment>();
reset(); /*< Set up initial environment and types */
}
public void reset() {
env_stack.clear();
pushEnv(new Environment());
}
public CronoType visit(Cons c) {
if(eval == Function.EvalType.NONE) {
return c;
}
Iterator<CronoType> iter = c.iterator();
if(!(iter.hasNext())) {
return c; /*< C is an empty list (may be Nil or T) */
}
CronoType value = iter.next().accept(this);
if(value instanceof Function) {
Function fun = ((Function)value);
Function.EvalType reserve = eval;
/* Set the eval type to the current function's type; this keeps
* type errors in builtins from happening, ex:
* (+ arg1 arg2) under partial evaluation would fail since +
* expects two numbers.
*/
eval = fun.eval;
if(eval.level > reserve.level) {
eval = reserve;
}
List<CronoType> args = new ArrayList<CronoType>();
while(iter.hasNext()) {
args.add(iter.next().accept(this));
}
eval = reserve;
int arglen = args.size();
int nargs = fun.arity;
if(arglen < nargs) {
if(arglen == 0) {
/* Special case -- we don't have to do anything to the
* function to return it properly. */
return fun;
}
/* Curry it */
if(fun instanceof LambdaFunction) {
LambdaFunction lfun = ((LambdaFunction)fun);
Environment env = getEnv();
if(!dynamic) {
/* Use the lambda's stored environment */
env = lfun.environment;
}
/* We want to preserve the current environment */
env = new Environment(env);
/* Put known values into the new environment */
for(int i = 0; i < arglen; ++i) {
env.put(lfun.arglist[i], args.get(i));
}
/* Create new argument list and remove remaining args from
* the new environment */
List<Symbol> largs = new ArrayList<Symbol>();
for(int i = arglen; i < lfun.arglist.length; ++i) {
largs.add(lfun.arglist[i]);
env.remove(lfun.arglist[i]);
}
/* Evaluate the body as much as possible */
reserve = eval;
eval = Function.EvalType.PARTIAL;
pushEnv(env);
CronoType[] lbody = new CronoType[lfun.body.length];
for(int i = 0; i < lfun.body.length; ++i) {
lbody[i] = lfun.body[i].accept(this);
}
popEnv();
eval = reserve;
/* Return the new, partially evaluated lambda */
Symbol[] arglist = new Symbol[largs.size()];
return new LambdaFunction(largs.toArray(arglist),
lbody, lfun.environment);
}
/* Builtin partial evaluation */
List<CronoType> body = new LinkedList<CronoType>();
body.add(fun);
body.addAll(args); /*< Dump args in order into the new cons */
CronoType[] barr = new CronoType[body.size()];
barr = body.toArray(barr);
/* Add symbols for missing args */
List<Symbol> arglist = new ArrayList<Symbol>();
Symbol sym;
for(int i = arglen, n = 0; i < nargs; ++i, ++n) {
sym = new Symbol(String.format("_i?%d!_", n));
body.add(sym);
arglist.add(sym);
}
/* Create a new lambda */
Symbol[] narglist = new Symbol[arglist.size()];
return new LambdaFunction(arglist.toArray(narglist), barr,
getEnv());
}
if(arglen > nargs && !fun.variadic) {
throw new RuntimeException(String.format(_too_many_args, fun,
arglen, nargs));
}
/* Full evaluation */
if(fun instanceof LambdaFunction && dynamic) {
/* We have to trick the lambda function if we want dynamic
* scoping. I hate making so many objects left and right, but
* this is the easiest way to do what I want here. */
LambdaFunction lfun = ((LambdaFunction)fun);
lfun = new LambdaFunction(lfun.arglist, lfun.body, getEnv());
CronoType[] argarray = new CronoType[args.size()];
return lfun.run(this, args.toArray(argarray));
}
if(eval == Function.EvalType.FULL) {
CronoType[] argarray = new CronoType[args.size()];
argarray = args.toArray(argarray);
TypeId[] types = new TypeId[args.size()];
for(int i = 0; i < types.length; ++i) {
types[i] = argarray[i].typeId();
}
- for(int i = 0; i < argarray.length; ++i) {
+ for(int i = 0; i < fun.args.length; ++i) {
if(!(fun.args[i].isType(argarray[i]))) {
String argstr = Arrays.toString(types);
String expected = Arrays.toString(fun.args);
throw new InterpreterException(_type_mismatch, fun,
expected, argstr);
}
}
return ((Function)value).run(this, args.toArray(argarray));
}else {
args.add(0, value);
return Cons.fromList(args);
}
}
/* The initial value is not a function */
List<CronoType> list = new LinkedList<CronoType>();
list.add(value);
while(iter.hasNext()) {
list.add(iter.next().accept(this));
}
return Cons.fromList(list);
}
public CronoType visit(Atom a) {
if(eval == Function.EvalType.NONE) {
return a;
}
CronoType t = a;
if(t instanceof Symbol) {
t = getEnv().get((Symbol)a);
if(t == null) {
if(eval == Function.EvalType.FULL) {
throw new RuntimeException(String.format(_scope_err,
a.toString()));
}
t = a;
}
}
/* Not else-if, so that we perform a double-resolution on a symbol that
* represents a TypeId */
if(t instanceof CronoTypeId) {
CronoType res = t; /*< Save symbol resolution in new CronoType */
t = getEnv().getType((CronoTypeId)t);
if(t == null) {
if(eval == Function.EvalType.FULL) {
throw new InterpreterException(_type_scope_err, a);
}
t = res; /*< Revert to symbol resolution */
}
}
return t;
}
public CronoType visit(Quote q) {
return q.node;
}
}
| true | true | public CronoType visit(Cons c) {
if(eval == Function.EvalType.NONE) {
return c;
}
Iterator<CronoType> iter = c.iterator();
if(!(iter.hasNext())) {
return c; /*< C is an empty list (may be Nil or T) */
}
CronoType value = iter.next().accept(this);
if(value instanceof Function) {
Function fun = ((Function)value);
Function.EvalType reserve = eval;
/* Set the eval type to the current function's type; this keeps
* type errors in builtins from happening, ex:
* (+ arg1 arg2) under partial evaluation would fail since +
* expects two numbers.
*/
eval = fun.eval;
if(eval.level > reserve.level) {
eval = reserve;
}
List<CronoType> args = new ArrayList<CronoType>();
while(iter.hasNext()) {
args.add(iter.next().accept(this));
}
eval = reserve;
int arglen = args.size();
int nargs = fun.arity;
if(arglen < nargs) {
if(arglen == 0) {
/* Special case -- we don't have to do anything to the
* function to return it properly. */
return fun;
}
/* Curry it */
if(fun instanceof LambdaFunction) {
LambdaFunction lfun = ((LambdaFunction)fun);
Environment env = getEnv();
if(!dynamic) {
/* Use the lambda's stored environment */
env = lfun.environment;
}
/* We want to preserve the current environment */
env = new Environment(env);
/* Put known values into the new environment */
for(int i = 0; i < arglen; ++i) {
env.put(lfun.arglist[i], args.get(i));
}
/* Create new argument list and remove remaining args from
* the new environment */
List<Symbol> largs = new ArrayList<Symbol>();
for(int i = arglen; i < lfun.arglist.length; ++i) {
largs.add(lfun.arglist[i]);
env.remove(lfun.arglist[i]);
}
/* Evaluate the body as much as possible */
reserve = eval;
eval = Function.EvalType.PARTIAL;
pushEnv(env);
CronoType[] lbody = new CronoType[lfun.body.length];
for(int i = 0; i < lfun.body.length; ++i) {
lbody[i] = lfun.body[i].accept(this);
}
popEnv();
eval = reserve;
/* Return the new, partially evaluated lambda */
Symbol[] arglist = new Symbol[largs.size()];
return new LambdaFunction(largs.toArray(arglist),
lbody, lfun.environment);
}
/* Builtin partial evaluation */
List<CronoType> body = new LinkedList<CronoType>();
body.add(fun);
body.addAll(args); /*< Dump args in order into the new cons */
CronoType[] barr = new CronoType[body.size()];
barr = body.toArray(barr);
/* Add symbols for missing args */
List<Symbol> arglist = new ArrayList<Symbol>();
Symbol sym;
for(int i = arglen, n = 0; i < nargs; ++i, ++n) {
sym = new Symbol(String.format("_i?%d!_", n));
body.add(sym);
arglist.add(sym);
}
/* Create a new lambda */
Symbol[] narglist = new Symbol[arglist.size()];
return new LambdaFunction(arglist.toArray(narglist), barr,
getEnv());
}
if(arglen > nargs && !fun.variadic) {
throw new RuntimeException(String.format(_too_many_args, fun,
arglen, nargs));
}
/* Full evaluation */
if(fun instanceof LambdaFunction && dynamic) {
/* We have to trick the lambda function if we want dynamic
* scoping. I hate making so many objects left and right, but
* this is the easiest way to do what I want here. */
LambdaFunction lfun = ((LambdaFunction)fun);
lfun = new LambdaFunction(lfun.arglist, lfun.body, getEnv());
CronoType[] argarray = new CronoType[args.size()];
return lfun.run(this, args.toArray(argarray));
}
if(eval == Function.EvalType.FULL) {
CronoType[] argarray = new CronoType[args.size()];
argarray = args.toArray(argarray);
TypeId[] types = new TypeId[args.size()];
for(int i = 0; i < types.length; ++i) {
types[i] = argarray[i].typeId();
}
for(int i = 0; i < argarray.length; ++i) {
if(!(fun.args[i].isType(argarray[i]))) {
String argstr = Arrays.toString(types);
String expected = Arrays.toString(fun.args);
throw new InterpreterException(_type_mismatch, fun,
expected, argstr);
}
}
return ((Function)value).run(this, args.toArray(argarray));
}else {
args.add(0, value);
return Cons.fromList(args);
}
}
/* The initial value is not a function */
List<CronoType> list = new LinkedList<CronoType>();
list.add(value);
while(iter.hasNext()) {
list.add(iter.next().accept(this));
}
return Cons.fromList(list);
}
| public CronoType visit(Cons c) {
if(eval == Function.EvalType.NONE) {
return c;
}
Iterator<CronoType> iter = c.iterator();
if(!(iter.hasNext())) {
return c; /*< C is an empty list (may be Nil or T) */
}
CronoType value = iter.next().accept(this);
if(value instanceof Function) {
Function fun = ((Function)value);
Function.EvalType reserve = eval;
/* Set the eval type to the current function's type; this keeps
* type errors in builtins from happening, ex:
* (+ arg1 arg2) under partial evaluation would fail since +
* expects two numbers.
*/
eval = fun.eval;
if(eval.level > reserve.level) {
eval = reserve;
}
List<CronoType> args = new ArrayList<CronoType>();
while(iter.hasNext()) {
args.add(iter.next().accept(this));
}
eval = reserve;
int arglen = args.size();
int nargs = fun.arity;
if(arglen < nargs) {
if(arglen == 0) {
/* Special case -- we don't have to do anything to the
* function to return it properly. */
return fun;
}
/* Curry it */
if(fun instanceof LambdaFunction) {
LambdaFunction lfun = ((LambdaFunction)fun);
Environment env = getEnv();
if(!dynamic) {
/* Use the lambda's stored environment */
env = lfun.environment;
}
/* We want to preserve the current environment */
env = new Environment(env);
/* Put known values into the new environment */
for(int i = 0; i < arglen; ++i) {
env.put(lfun.arglist[i], args.get(i));
}
/* Create new argument list and remove remaining args from
* the new environment */
List<Symbol> largs = new ArrayList<Symbol>();
for(int i = arglen; i < lfun.arglist.length; ++i) {
largs.add(lfun.arglist[i]);
env.remove(lfun.arglist[i]);
}
/* Evaluate the body as much as possible */
reserve = eval;
eval = Function.EvalType.PARTIAL;
pushEnv(env);
CronoType[] lbody = new CronoType[lfun.body.length];
for(int i = 0; i < lfun.body.length; ++i) {
lbody[i] = lfun.body[i].accept(this);
}
popEnv();
eval = reserve;
/* Return the new, partially evaluated lambda */
Symbol[] arglist = new Symbol[largs.size()];
return new LambdaFunction(largs.toArray(arglist),
lbody, lfun.environment);
}
/* Builtin partial evaluation */
List<CronoType> body = new LinkedList<CronoType>();
body.add(fun);
body.addAll(args); /*< Dump args in order into the new cons */
CronoType[] barr = new CronoType[body.size()];
barr = body.toArray(barr);
/* Add symbols for missing args */
List<Symbol> arglist = new ArrayList<Symbol>();
Symbol sym;
for(int i = arglen, n = 0; i < nargs; ++i, ++n) {
sym = new Symbol(String.format("_i?%d!_", n));
body.add(sym);
arglist.add(sym);
}
/* Create a new lambda */
Symbol[] narglist = new Symbol[arglist.size()];
return new LambdaFunction(arglist.toArray(narglist), barr,
getEnv());
}
if(arglen > nargs && !fun.variadic) {
throw new RuntimeException(String.format(_too_many_args, fun,
arglen, nargs));
}
/* Full evaluation */
if(fun instanceof LambdaFunction && dynamic) {
/* We have to trick the lambda function if we want dynamic
* scoping. I hate making so many objects left and right, but
* this is the easiest way to do what I want here. */
LambdaFunction lfun = ((LambdaFunction)fun);
lfun = new LambdaFunction(lfun.arglist, lfun.body, getEnv());
CronoType[] argarray = new CronoType[args.size()];
return lfun.run(this, args.toArray(argarray));
}
if(eval == Function.EvalType.FULL) {
CronoType[] argarray = new CronoType[args.size()];
argarray = args.toArray(argarray);
TypeId[] types = new TypeId[args.size()];
for(int i = 0; i < types.length; ++i) {
types[i] = argarray[i].typeId();
}
for(int i = 0; i < fun.args.length; ++i) {
if(!(fun.args[i].isType(argarray[i]))) {
String argstr = Arrays.toString(types);
String expected = Arrays.toString(fun.args);
throw new InterpreterException(_type_mismatch, fun,
expected, argstr);
}
}
return ((Function)value).run(this, args.toArray(argarray));
}else {
args.add(0, value);
return Cons.fromList(args);
}
}
/* The initial value is not a function */
List<CronoType> list = new LinkedList<CronoType>();
list.add(value);
while(iter.hasNext()) {
list.add(iter.next().accept(this));
}
return Cons.fromList(list);
}
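The pair above fixes the type-check loop bound: the buggy version walks i over argarray.length while indexing fun.args[i], which overruns the declared parameter list whenever a variadic call supplies more arguments than fun.args declares; the fix iterates over fun.args.length so only declared parameters are checked. A standalone restatement of the rule (all names are illustrative):

public final class ArityCheckSketch {
    // Check actual arguments against the declared parameter types only;
    // extra variadic arguments beyond the declared list are left unchecked.
    static void check(Class<?>[] declared, Object[] actual) {
        for (int i = 0; i < declared.length; i++) { // not actual.length
            if (!declared[i].isInstance(actual[i])) {
                throw new IllegalArgumentException(
                    "argument " + i + " is not a " + declared[i].getSimpleName());
            }
        }
    }

    public static void main(String[] args) {
        // Passes: the extra argument (42) belongs to the variadic tail.
        check(new Class<?>[] { String.class }, new Object[] { "ok", 42 });
    }
}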
|
diff --git a/jaxrs-doclet/src/main/java/com/hypnoticocelot/jaxrs/doclet/parser/ApiClassParser.java b/jaxrs-doclet/src/main/java/com/hypnoticocelot/jaxrs/doclet/parser/ApiClassParser.java
index 1f464b2..60d4ba3 100644
--- a/jaxrs-doclet/src/main/java/com/hypnoticocelot/jaxrs/doclet/parser/ApiClassParser.java
+++ b/jaxrs-doclet/src/main/java/com/hypnoticocelot/jaxrs/doclet/parser/ApiClassParser.java
@@ -1,123 +1,123 @@
package com.hypnoticocelot.jaxrs.doclet.parser;
import com.google.common.base.Function;
import com.hypnoticocelot.jaxrs.doclet.DocletOptions;
import com.hypnoticocelot.jaxrs.doclet.model.Api;
import com.hypnoticocelot.jaxrs.doclet.model.Method;
import com.hypnoticocelot.jaxrs.doclet.model.Model;
import com.hypnoticocelot.jaxrs.doclet.model.Operation;
import com.sun.javadoc.ClassDoc;
import com.sun.javadoc.MethodDoc;
import com.sun.javadoc.Type;
import java.util.*;
import static com.google.common.base.Objects.firstNonNull;
import static com.google.common.collect.Collections2.transform;
import static com.hypnoticocelot.jaxrs.doclet.parser.AnnotationHelper.parsePath;
public class ApiClassParser {
private final DocletOptions options;
private final ClassDoc classDoc;
private final String rootPath;
private final Set<Model> models;
private final Collection<ClassDoc> classes;
private final Method parentMethod;
public ApiClassParser(DocletOptions options, ClassDoc classDoc, Collection<ClassDoc> classes) {
this.options = options;
this.classDoc = classDoc;
this.rootPath = firstNonNull(parsePath(classDoc.annotations()), "");
this.models = new LinkedHashSet<Model>();
this.classes = classes;
this.parentMethod = null;
}
/**
* Creates sub-resource class parser.
* @param parentMethod method that creates the sub-resource.
*/
public ApiClassParser(DocletOptions options, ClassDoc classDoc, Collection<ClassDoc> classes, Method parentMethod) {
this.options = options;
this.classDoc = classDoc;
this.rootPath = firstNonNull(parsePath(classDoc.annotations()), "");
this.models = new LinkedHashSet<Model>();
this.classes = classes;
this.parentMethod = parentMethod;
}
public String getRootPath() {
return rootPath;
}
public Collection<Api> parse() {
List<Api> apis = new ArrayList<Api>();
Map<String, Collection<Method>> apiMethods = new HashMap<String, Collection<Method>>();
for (MethodDoc method : classDoc.methods()) {
ApiMethodParser methodParser = parentMethod == null ?
new ApiMethodParser(options, rootPath, method) :
new ApiMethodParser(options, parentMethod, method);
Method parsedMethod = methodParser.parse();
if (parsedMethod == null) {
continue;
}
if (parsedMethod.isSubResource()) {
ClassDoc subResourceClassDoc = lookUpClassDoc(method.returnType());
if (subResourceClassDoc != null) {
// delete class from the dictionary to handle recursive sub-resources
Collection<ClassDoc> shrunkClasses = new ArrayList<ClassDoc>(classes);
shrunkClasses.remove(classDoc);
// recursively parse the sub-resource class
ApiClassParser subResourceParser = new ApiClassParser(options, subResourceClassDoc, shrunkClasses, parsedMethod);
apis.addAll(subResourceParser.parse());
models.addAll(subResourceParser.models());
- continue;
}
+ continue;
}
models.addAll(methodParser.models());
String realPath = parsedMethod.getPath();
Collection<Method> matchingMethods = apiMethods.get(realPath);
if (matchingMethods == null) {
matchingMethods = new ArrayList<Method>();
apiMethods.put(realPath, matchingMethods);
}
matchingMethods.add(parsedMethod);
}
for (Map.Entry<String, Collection<Method>> apiEntries : apiMethods.entrySet()) {
Collection<Operation> operations = new ArrayList<Operation>(
transform(apiEntries.getValue(), new Function<Method, Operation>() {
@Override
public Operation apply(Method method) {
return new Operation(method);
}
})
);
apis.add(new Api(apiEntries.getKey(), "", operations));
}
Collections.sort(apis, new Comparator<Api>() {
@Override
public int compare(Api o1, Api o2) {
return o1.getPath().compareTo(o2.getPath());
}
});
return apis;
}
private ClassDoc lookUpClassDoc(Type type) {
for (ClassDoc subResourceClassDoc : classes) {
if (subResourceClassDoc.qualifiedTypeName().equals(type.qualifiedTypeName())) {
return subResourceClassDoc;
}
}
return null;
}
public Collection<Model> models() {
return models;
}
}
| false | true | public Collection<Api> parse() {
List<Api> apis = new ArrayList<Api>();
Map<String, Collection<Method>> apiMethods = new HashMap<String, Collection<Method>>();
for (MethodDoc method : classDoc.methods()) {
ApiMethodParser methodParser = parentMethod == null ?
new ApiMethodParser(options, rootPath, method) :
new ApiMethodParser(options, parentMethod, method);
Method parsedMethod = methodParser.parse();
if (parsedMethod == null) {
continue;
}
if (parsedMethod.isSubResource()) {
ClassDoc subResourceClassDoc = lookUpClassDoc(method.returnType());
if (subResourceClassDoc != null) {
// delete class from the dictionary to handle recursive sub-resources
Collection<ClassDoc> shrunkClasses = new ArrayList<ClassDoc>(classes);
shrunkClasses.remove(classDoc);
// recursively parse the sub-resource class
ApiClassParser subResourceParser = new ApiClassParser(options, subResourceClassDoc, shrunkClasses, parsedMethod);
apis.addAll(subResourceParser.parse());
models.addAll(subResourceParser.models());
continue;
}
}
models.addAll(methodParser.models());
String realPath = parsedMethod.getPath();
Collection<Method> matchingMethods = apiMethods.get(realPath);
if (matchingMethods == null) {
matchingMethods = new ArrayList<Method>();
apiMethods.put(realPath, matchingMethods);
}
matchingMethods.add(parsedMethod);
}
for (Map.Entry<String, Collection<Method>> apiEntries : apiMethods.entrySet()) {
Collection<Operation> operations = new ArrayList<Operation>(
transform(apiEntries.getValue(), new Function<Method, Operation>() {
@Override
public Operation apply(Method method) {
return new Operation(method);
}
})
);
apis.add(new Api(apiEntries.getKey(), "", operations));
}
Collections.sort(apis, new Comparator<Api>() {
@Override
public int compare(Api o1, Api o2) {
return o1.getPath().compareTo(o2.getPath());
}
});
return apis;
}
| public Collection<Api> parse() {
List<Api> apis = new ArrayList<Api>();
Map<String, Collection<Method>> apiMethods = new HashMap<String, Collection<Method>>();
for (MethodDoc method : classDoc.methods()) {
ApiMethodParser methodParser = parentMethod == null ?
new ApiMethodParser(options, rootPath, method) :
new ApiMethodParser(options, parentMethod, method);
Method parsedMethod = methodParser.parse();
if (parsedMethod == null) {
continue;
}
if (parsedMethod.isSubResource()) {
ClassDoc subResourceClassDoc = lookUpClassDoc(method.returnType());
if (subResourceClassDoc != null) {
// delete class from the dictionary to handle recursive sub-resources
Collection<ClassDoc> shrunkClasses = new ArrayList<ClassDoc>(classes);
shrunkClasses.remove(classDoc);
// recursively parse the sub-resource class
ApiClassParser subResourceParser = new ApiClassParser(options, subResourceClassDoc, shrunkClasses, parsedMethod);
apis.addAll(subResourceParser.parse());
models.addAll(subResourceParser.models());
}
continue;
}
models.addAll(methodParser.models());
String realPath = parsedMethod.getPath();
Collection<Method> matchingMethods = apiMethods.get(realPath);
if (matchingMethods == null) {
matchingMethods = new ArrayList<Method>();
apiMethods.put(realPath, matchingMethods);
}
matchingMethods.add(parsedMethod);
}
for (Map.Entry<String, Collection<Method>> apiEntries : apiMethods.entrySet()) {
Collection<Operation> operations = new ArrayList<Operation>(
transform(apiEntries.getValue(), new Function<Method, Operation>() {
@Override
public Operation apply(Method method) {
return new Operation(method);
}
})
);
apis.add(new Api(apiEntries.getKey(), "", operations));
}
Collections.sort(apis, new Comparator<Api>() {
@Override
public int compare(Api o1, Api o2) {
return o1.getPath().compareTo(o2.getPath());
}
});
return apis;
}
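The pair above moves continue out of the if (subResourceClassDoc != null) block: after the fix every sub-resource method is skipped as a plain API entry, whereas before, a sub-resource whose return type could not be resolved fell through and was registered as an ordinary operation. A compact illustration of the control-flow change (names and data are illustrative):

public final class ContinuePlacementSketch {
    public static void main(String[] args) {
        for (String method : new String[] { "resolvableSub", "unresolvableSub", "plainOp" }) {
            if (method.endsWith("Sub")) {
                if (method.startsWith("resolvable")) {
                    System.out.println("recursing into " + method);
                }
                continue; // fixed placement: skip every sub-resource method
            }
            System.out.println("registering operation " + method);
        }
    }
}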
|