diff
stringlengths 262
553k
| is_single_chunk
bool 2
classes | is_single_function
bool 1
class | buggy_function
stringlengths 20
391k
| fixed_function
stringlengths 0
392k
|
---|---|---|---|---|
diff --git a/org.eclipse.egit.ui/src/org/eclipse/egit/ui/internal/repository/DropAdapterAssistant.java b/org.eclipse.egit.ui/src/org/eclipse/egit/ui/internal/repository/DropAdapterAssistant.java
index 91bfbf15..a985dda3 100644
--- a/org.eclipse.egit.ui/src/org/eclipse/egit/ui/internal/repository/DropAdapterAssistant.java
+++ b/org.eclipse.egit.ui/src/org/eclipse/egit/ui/internal/repository/DropAdapterAssistant.java
@@ -1,85 +1,87 @@
/*******************************************************************************
* Copyright (c) 2010 SAP AG.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* Mathias Kinzler (SAP AG) - initial implementation
*******************************************************************************/
package org.eclipse.egit.ui.internal.repository;
import java.io.File;
import org.eclipse.core.runtime.IStatus;
import org.eclipse.core.runtime.Status;
import org.eclipse.egit.ui.Activator;
import org.eclipse.jgit.lib.Constants;
import org.eclipse.jgit.lib.RepositoryCache.FileKey;
import org.eclipse.jgit.util.FS;
import org.eclipse.swt.dnd.DropTargetEvent;
import org.eclipse.swt.dnd.FileTransfer;
import org.eclipse.swt.dnd.TransferData;
import org.eclipse.ui.navigator.CommonDropAdapter;
import org.eclipse.ui.navigator.CommonDropAdapterAssistant;
/**
* Drop Adapter Assistant for the Repositories View
*/
public class DropAdapterAssistant extends CommonDropAdapterAssistant {
/**
* Default constructor
*/
public DropAdapterAssistant() {
// nothing
}
@Override
public IStatus handleDrop(CommonDropAdapter aDropAdapter,
DropTargetEvent aDropTargetEvent, Object aTarget) {
String[] data = (String[]) aDropTargetEvent.data;
for (String folder : data) {
File repoFile = new File(folder);
if (FileKey.isGitRepository(repoFile, FS.DETECTED))
Activator.getDefault().getRepositoryUtil()
.addConfiguredRepository(repoFile);
// also a direct parent of a .git dir is allowed
else if (!repoFile.getName().equals(Constants.DOT_GIT)) {
File dotgitfile = new File(repoFile, Constants.DOT_GIT);
if (FileKey.isGitRepository(dotgitfile, FS.DETECTED))
Activator.getDefault().getRepositoryUtil()
.addConfiguredRepository(dotgitfile);
}
}
// the returned Status is not consumed anyway
return Status.OK_STATUS;
}
@Override
public IStatus validateDrop(Object target, int operation,
TransferData transferData) {
// check that all paths are valid repository paths
String[] folders = (String[]) FileTransfer.getInstance().nativeToJava(
transferData);
+ if (folders == null)
+ return Status.CANCEL_STATUS;
for (String folder : folders) {
File repoFile = new File(folder);
if (FileKey.isGitRepository(repoFile, FS.DETECTED)) {
continue;
}
// convenience: also allow the direct parent of .git
if (!repoFile.getName().equals(Constants.DOT_GIT)) {
File dotgitfile = new File(repoFile, Constants.DOT_GIT);
if (FileKey.isGitRepository(dotgitfile, FS.DETECTED))
continue;
}
return Status.CANCEL_STATUS;
}
return Status.OK_STATUS;
}
@Override
public boolean isSupportedType(TransferData aTransferType) {
return FileTransfer.getInstance().isSupportedType(aTransferType);
}
}
| true | true | public IStatus validateDrop(Object target, int operation,
TransferData transferData) {
// check that all paths are valid repository paths
String[] folders = (String[]) FileTransfer.getInstance().nativeToJava(
transferData);
for (String folder : folders) {
File repoFile = new File(folder);
if (FileKey.isGitRepository(repoFile, FS.DETECTED)) {
continue;
}
// convenience: also allow the direct parent of .git
if (!repoFile.getName().equals(Constants.DOT_GIT)) {
File dotgitfile = new File(repoFile, Constants.DOT_GIT);
if (FileKey.isGitRepository(dotgitfile, FS.DETECTED))
continue;
}
return Status.CANCEL_STATUS;
}
return Status.OK_STATUS;
}
| public IStatus validateDrop(Object target, int operation,
TransferData transferData) {
// check that all paths are valid repository paths
String[] folders = (String[]) FileTransfer.getInstance().nativeToJava(
transferData);
if (folders == null)
return Status.CANCEL_STATUS;
for (String folder : folders) {
File repoFile = new File(folder);
if (FileKey.isGitRepository(repoFile, FS.DETECTED)) {
continue;
}
// convenience: also allow the direct parent of .git
if (!repoFile.getName().equals(Constants.DOT_GIT)) {
File dotgitfile = new File(repoFile, Constants.DOT_GIT);
if (FileKey.isGitRepository(dotgitfile, FS.DETECTED))
continue;
}
return Status.CANCEL_STATUS;
}
return Status.OK_STATUS;
}
|
diff --git a/baixing_quanleimu/src/com/quanleimu/view/PostGoodsView.java b/baixing_quanleimu/src/com/quanleimu/view/PostGoodsView.java
index 326c5817..3b81a5a4 100644
--- a/baixing_quanleimu/src/com/quanleimu/view/PostGoodsView.java
+++ b/baixing_quanleimu/src/com/quanleimu/view/PostGoodsView.java
@@ -1,1688 +1,1692 @@
package com.quanleimu.view;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.UnsupportedEncodingException;
import java.net.URLEncoder;
import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Set;
import org.json.JSONException;
import org.json.JSONObject;
import android.app.AlertDialog;
import android.app.AlertDialog.Builder;
import android.app.ProgressDialog;
import android.content.DialogInterface;
import android.content.Intent;
import android.database.Cursor;
import android.graphics.Bitmap;
import android.graphics.Bitmap.CompressFormat;
import android.graphics.BitmapFactory;
import android.net.Uri;
import android.os.Bundle;
import android.os.Environment;
import android.os.Handler;
import android.os.Message;
import android.provider.MediaStore;
import android.util.Log;
import android.view.KeyEvent;
import android.view.LayoutInflater;
import android.view.View;
import android.view.View.OnClickListener;
import android.view.WindowManager;
import android.widget.Button;
import android.widget.CheckBox;
import android.widget.EditText;
import android.widget.ImageView;
import android.widget.LinearLayout;
import android.widget.TextView;
import android.widget.Toast;
import com.quanleimu.activity.BaseActivity;
import com.quanleimu.activity.QuanleimuApplication;
import com.quanleimu.activity.R;
import com.quanleimu.entity.GoodsDetail;
import com.quanleimu.entity.PostGoodsBean;
import com.quanleimu.entity.PostMu;
import com.quanleimu.entity.UserBean;
import com.quanleimu.jsonutil.JsonUtil;
import com.quanleimu.util.Communication;
import com.quanleimu.util.Helper;
import com.quanleimu.util.Util;
import android.view.ViewGroup;
import com.quanleimu.view.MultiLevelSelectionView;
import com.quanleimu.view.PostGoodsCateMainView;
import android.text.InputType;
public class PostGoodsView extends BaseView implements OnClickListener {
public ImageView img1, img2, img3;
public String categoryEnglishName = "";
public String json = "";
public LinearLayout layout_txt;
public LinkedHashMap<String, PostGoodsBean> postList; //发布模板每一项的集合
public static final int NONE = 0;
public static final int PHOTOHRAPH = 1;
public static final int PHOTOZOOM = 2;
public static final int PHOTORESOULT = 3;
public static final int POST_LIST = 4;
public static final int POST_OTHERPROPERTIES = 5;
public static final int POST_CHECKSELECT = 6;
public static final int MSG_MULTISEL_BACK = 10;
public static final int MSG_CATEGORY_SEL_BACK = 11;
public static final String IMAGEUNSPECIFIED = "image/*";
private LinkedHashMap<String, TextView> tvlist;
private String displayname;
private LinkedHashMap<String, String> postMap; //发布需要提交的参数集合
private LinkedHashMap<String, String> initialValueMap;
// private LinkedHashMap<Integer, Object> btMap; //根据postList集添加对应的控件View
private LinkedHashMap<String, Object> editMap;
private AlertDialog ad;
private Button photoalbum, photomake, photocancle;
private ArrayList<String>bitmap_url;
private ImageView[] imgs;
private String mobile, password;
private UserBean user;
private GoodsDetail goodsDetail;
public List<String> listUrl;
private int currentImgView = -1;
private int uploadCount = 0;
private BaseActivity baseActivity;
private Bundle bundle;
private boolean userValidated = false;
private boolean loginTried = false;
static private String lastCategoryEnglishName = null;
static private String lastCategoryShowName = null;
private List<String> otherProperties = new ArrayList<String>();
public PostGoodsView(BaseActivity context, Bundle bundle, String categoryEnglishName){
super(context, bundle);
this.baseActivity = context;
this.categoryEnglishName = categoryEnglishName;
this.bundle = bundle;
init();
}
public PostGoodsView(BaseActivity context, Bundle bundle, String categoryEnglishName, GoodsDetail detail){
super(context, bundle);
this.baseActivity = context;
this.categoryEnglishName = categoryEnglishName;
this.goodsDetail = detail;
this.bundle = bundle;
init();
}
@Override
public void onDestroy() {
// TODO Auto-generated method stub
for(int i = 0; i < 3; ++ i){
File file = new File(Environment.getExternalStorageDirectory(), "temp" + i + ".jpg");
if(file.exists()){
file.delete();
}
file = null;
}
super.onDestroy();
}
// private String getEditMapKeyDisplayName(String keyName){
// if(null == editMap) return null;
// Set<String>keySet = editMap.keySet();
// if(null == keySet) return null;
// Object[] keys = (Object[])keySet.toArray();
// for(int i = 0; i < keys.length; ++ i){
// String[] subKeys = ((String)keys[i]).split(" ");
// if(subKeys.length != 2) continue;
// if(subKeys[1].equals(keyName)) return subKeys[0];
// }
// return null;
// }
private Object getEditMapValue(String key){
if(null == editMap) return null;
Set<String>keySet = editMap.keySet();
if(null == keySet) return null;
Object[] keys = (Object[])keySet.toArray();
for(int i = 0; i < keys.length; ++ i){
String[] subKeys = ((String)keys[i]).split(" ");
for(int j = 0; j < subKeys.length; ++ j){
if(subKeys[j].equals(key)){
return editMap.get(keys[i]);
}
}
}
return null;
}
private void ConfirmAbortAlert(){
AlertDialog.Builder builder = new AlertDialog.Builder(this.getContext());
builder.setTitle("提示:")
.setMessage("您所填写的数据将会丢失,放弃发布?")
.setNegativeButton("否", null)
.setPositiveButton("是",
new DialogInterface.OnClickListener() {
@Override
public void onClick(DialogInterface dialog,
int which) {
if(m_viewInfoListener != null){
m_viewInfoListener.onExit(PostGoodsView.this);
}
}
});
builder.create().show();
}
@Override
public boolean onKeyDown(int keyCode, KeyEvent event){
if(keyCode == KeyEvent.KEYCODE_BACK && filled()){
ConfirmAbortAlert();
return true;
}
return super.onKeyDown(keyCode, event);
}
@Override protected void onAttachedToWindow(){
if(!userValidated){
usercheck();
}
super.onAttachedToWindow();
}
private void init() {
LayoutInflater inflater = LayoutInflater.from(this.getContext());
View v = inflater.inflate(R.layout.postgoodsview, null);
this.addView(v);
layout_txt = (LinearLayout) v.findViewById(R.id.layout_txt);
baseActivity.getWindow().setSoftInputMode(
WindowManager.LayoutParams.SOFT_INPUT_STATE_ALWAYS_HIDDEN);
initial();
pd = new ProgressDialog(this.getContext());
pd.setTitle("提示");
pd.setMessage("请稍候...");
pd.setCancelable(true);
}
private static String getDisplayValue(PostGoodsBean bean, GoodsDetail detail, String detailKey){
if(bean == null || detail == null || detailKey == null || detailKey.equals(""))return "";
String value = detail.getValueByKey(detailKey);
String displayValue = "";
if(bean.getControlType().equals("input") || bean.getControlType().equals("textarea")){
displayValue = detail.getValueByKey(detailKey);
if(displayValue != null && !bean.getUnit().equals("")){
int pos = displayValue.indexOf(bean.getUnit());
if(pos != -1){
displayValue = displayValue.substring(0, pos);
}
}
return displayValue;
}
else if(bean.getControlType().equals("select") || bean.getControlType().equals("checkbox")){
List<String> beanVs = bean.getValues();
if(beanVs != null){
for(int t = 0; t < beanVs.size(); ++ t){
if(beanVs.get(t).equals(value)){
displayValue = bean.getLabels().get(t);
break;
}
}
}
if(displayValue.equals("")){
String _sValue = detail.getValueByKey(detailKey + "_s");
if(_sValue != null && !_sValue.equals("")){
return _sValue;
}
}
}
return displayValue;
}
private void editpostUI() {
// TODO Auto-generated method stub
if (goodsDetail != null) {
Set<String> sets = editMap.keySet();
if(sets != null){
Object[] objKeys = sets.toArray();
for(int i = 0; i < objKeys.length; ++ i){
String key = (String)objKeys[i];
String[] subs = key.split(" ");
PostGoodsBean bean = postList.get(subs[0]);
String displayValue = getDisplayValue(bean, goodsDetail, subs[1]);
String value = goodsDetail.getValueByKey(subs[1]);
Object obj = this.getEditMapValue(subs[0]);
if (obj != null && value != null && !value.equals("")) {
if (obj instanceof CheckBox) {
String v = (String) ((CheckBox)obj).getTag();
if (displayValue.contains(v)) {
((CheckBox)obj).setChecked(true);
} else {
((CheckBox)obj).setChecked(false);
}
}
else if (obj instanceof EditText) {
((EditText) obj).setText(displayValue);
}
else if (obj instanceof TextView) {
((TextView) obj).setText(displayValue);
}
postMap.put(subs[0], value);
}
}
}
Object objArea = this.getEditMapValue("地区");
if(objArea != null){
String strArea = goodsDetail.getValueByKey(GoodsDetail.EDATAKEYS.EDATAKEYS_AREANAME);
String[] areas = strArea.split(",");
if(areas.length >= 2){
if(objArea instanceof TextView){
((TextView)objArea).setText(areas[areas.length - 1]);
}
else if(objArea instanceof EditText){
((EditText)objArea).setText(areas[areas.length - 1]);
}
PostGoodsBean areaBean = postList.get("地区");
if(areaBean != null && areaBean.getValues() != null && areaBean.getLabels() != null){
List<String> areaLabels = areaBean.getLabels();
for(int i = 0; i < areaLabels.size(); ++ i){
if(areaLabels.get(i).equals(areas[1])){
postMap.put("地区", areaBean.getValues().get(i));
break;
}
}
}
}
}
if (goodsDetail.getImageList() != null) {
String b = (goodsDetail.getImageList().getResize180())
.substring(1, (goodsDetail.getImageList()
.getResize180()).length() - 1);
b = Communication.replace(b);
if (b.contains(",")) {
String[] c = b.split(",");
for (int k = 0; k < c.length; k++) {
listUrl.add(c[k]);
}
}else{
listUrl.add(b);
}
String big = (goodsDetail.getImageList().getBig())
.substring(1, (goodsDetail.getImageList()
.getBig()).length() - 1);
big = Communication.replace(big);
String[] cbig = big.split(",");
List<String> smalls = new ArrayList<String>();
List<String> bigs = new ArrayList<String>();
for (int j = 0; j < listUrl.size(); j++) {
String bigUrl = (cbig == null || cbig.length <= j) ? null : cbig[j];
if(j > 2)break;
smalls.add(listUrl.get(j));
bigs.add(bigUrl);
}
new Thread(new Imagethread(smalls, bigs)).start();
}
}
//copy to initial value map
for(int j = 0; j < postMap.size(); ++j)
{
String key = (String) postMap.keySet().toArray()[j];
initialValueMap.put(key, postMap.get(key).toString());
}
for(int i = 0; i < listUrl.size(); ++i){
if(listUrl.get(i) != null && listUrl.get(i).length() > 0){
initialValueMap.put(listUrl.get(i), "existingTag");
}
}
}
/**
* 用户登录进发布页、否则进登陆页
*/
private void usercheck() {
user = (UserBean) Util.loadDataFromLocate(this.getContext(), "user");
if (user == null) {
if(loginTried){
if(this.m_viewInfoListener != null){
m_viewInfoListener.onExit(this);
}
}else{
if(this.m_viewInfoListener != null){
bundle.putString("backPageName", "取消");
m_viewInfoListener.onNewView(new LoginView(baseActivity, bundle));
loginTried = true;
}
}
} else {
userValidated = true;
mobile = user.getPhone();
password = user.getPassword();
if((lastCategoryEnglishName == null || lastCategoryEnglishName.equals(""))
&& (categoryEnglishName == null || categoryEnglishName.equals(""))){
this.addCategoryItem();
return;
}
//获取发布模板
String cityEnglishName = QuanleimuApplication.getApplication().cityEnglishName;
if(goodsDetail != null && goodsDetail.getValueByKey(GoodsDetail.EDATAKEYS.EDATAKEYS_CITYENGLISHNAME).length() > 0){
cityEnglishName = goodsDetail.getValueByKey(GoodsDetail.EDATAKEYS.EDATAKEYS_CITYENGLISHNAME);
}
if(categoryEnglishName == null || categoryEnglishName.equals("")){
categoryEnglishName = lastCategoryEnglishName;
}
PostMu postMu = (PostMu) Util.loadDataFromLocate(this.getContext(), categoryEnglishName + cityEnglishName);
if (postMu != null && !postMu.getJson().equals("")) {
json = postMu.getJson();
Long time = postMu.getTime();
if (time + (24 * 3600 * 100) < System.currentTimeMillis()) {
//更新界面
myHandler.sendEmptyMessage(1);
//下载新数据 false-不更新界面
new Thread(new GetCategoryMetaThread(false,cityEnglishName)).start();
} else {
//更新界面
myHandler.sendEmptyMessage(1);
}
} else {
//下载新数据 true--更新界面
pd.show();
new Thread(new GetCategoryMetaThread(true,cityEnglishName)).start();
}
}
}
/**
* 初始化
*/
private void initial() {
tvlist = new LinkedHashMap<String, TextView>();
postList = new LinkedHashMap<String, PostGoodsBean>();
postMap = new LinkedHashMap<String, String>();
initialValueMap = new LinkedHashMap<String, String>();
listUrl = new ArrayList<String>();
// textViewMap = new LinkedHashMap<Integer, TextView>();
// checkBoxMap = new LinkedHashMap<Integer, List<CheckBox>>();
bitmap_url = new ArrayList<String>();
bitmap_url.add(null);
bitmap_url.add(null);
bitmap_url.add(null);
// btMap = new LinkedHashMap<Integer, Object>();
editMap = new LinkedHashMap<String, Object>();
}
enum ImageStatus{
ImageStatus_Normal,
ImageStatus_Unset,
ImageStatus_Failed
}
private ImageStatus getCurrentImageStatus(int index){
if(bitmap_url.get(index) == null)return ImageStatus.ImageStatus_Unset;
if(bitmap_url.get(index).contains("http:")) return ImageStatus.ImageStatus_Normal;
return ImageStatus.ImageStatus_Failed;
}
@Override
public void onClick(View v) {
// TODO Auto-generated method stub
if (v == img1 || v == img2 || v == img3) {
// 3张图片点击事件
for (int i = 0; i < imgs.length; i++) {
if (imgs[i].equals(v)) {
currentImgView = i;
ImageStatus status = getCurrentImageStatus(i);
if(ImageStatus.ImageStatus_Unset == status){
showDialog();
}
else if(ImageStatus.ImageStatus_Failed == status){
String[] items = {"重试", "换一张"};
new AlertDialog.Builder(this.getContext())
.setTitle("选择操作")
.setItems(items, new DialogInterface.OnClickListener() {
@Override
public void onClick(DialogInterface dialog, int which) {
// TODO Auto-generated method stub
if(0 == which){
new Thread(new UpLoadThread(bitmap_url.get(currentImgView), currentImgView)).start();
}
else{
bitmap_url.set(currentImgView, null);
imgs[currentImgView].setImageResource(R.drawable.d);
showDialog();
//((BXDecorateImageView)imgs[currentImgView]).setDecorateResource(-1, BXDecorateImageView.ImagePos.ImagePos_LeftTop);
}
}
})
.setNegativeButton("取消", new DialogInterface.OnClickListener() {
@Override
public void onClick(DialogInterface dialog, int which) {
// TODO Auto-generated method stub
dialog.dismiss();
}
}).show();
}
else{
//String[] items = {"删除"};
new AlertDialog.Builder(this.getContext())
.setMessage("删除当前图片?")
.setPositiveButton("删除", new DialogInterface.OnClickListener() {
@Override
public void onClick(DialogInterface dialog, int which) {
// TODO Auto-generated method stub
bitmap_url.set(currentImgView, null);
imgs[currentImgView].setImageResource(R.drawable.d);
// ((BXDecorateImageView)imgs[currentImgView]).setDecorateResource(-1, BXDecorateImageView.ImagePos.ImagePos_LeftTop);
}
})
.setNegativeButton("取消", new DialogInterface.OnClickListener() {
@Override
public void onClick(DialogInterface dialog, int which) {
// TODO Auto-generated method stub
dialog.dismiss();
}
}).show();
}
}
}
} else if (v == photoalbum) {
// 相册
if (ad.isShowing()) {
ad.dismiss();
}
Intent intent3 = new Intent(Intent.ACTION_GET_CONTENT);
intent3.addCategory(Intent.CATEGORY_OPENABLE);
intent3.setType(IMAGEUNSPECIFIED);
baseActivity.startActivityForResult(Intent.createChooser(intent3, "选择图片"),
PHOTOZOOM);
} else if (v == photomake) {
// 拍照
if (ad.isShowing()) {
ad.dismiss();
}
Intent intent2 = new Intent(MediaStore.ACTION_IMAGE_CAPTURE);
intent2.putExtra(MediaStore.EXTRA_OUTPUT,
Uri.fromFile(new File(Environment.getExternalStorageDirectory(), "temp" + this.currentImgView + ".jpg")));
baseActivity.startActivityForResult(intent2, PHOTOHRAPH);
} else if (v == photocancle) {
ad.dismiss();
}
}
@Override
public boolean onBack()
{
return onLeftActionPressed();
}
@Override
public boolean onLeftActionPressed(){
if(filled()){
ConfirmAbortAlert();
return true;
}
else{
return false;
}
}
@Override
public boolean onRightActionPressed(){
if(uploadCount > 0){
Toast.makeText(this.getContext(),"图片正在上传" + "!", 0).show();
}
// 提交
else if (check2()) {
//循环发布需提交的数据集合
extractInputData();
System.out.println("postMap---?"+postMap);
pd = ProgressDialog.show(this.getContext(), "提示", "请稍候...");
pd.setCancelable(true);
new Thread(new UpdateThread()).start();
}
return true;
}
private void extractInputData() {
for (int i = 0; i < postList.size(); i++) {
String key = (String) postList.keySet().toArray()[i];
PostGoodsBean postGoodsBean = postList.get(key);
if (postGoodsBean.getControlType().equals("input")
|| postGoodsBean.getControlType().equals("textarea")) {
//文本框
EditText et = (EditText)getEditMapValue(postGoodsBean.getDisplayName());
if(et != null){
postMap.put(postGoodsBean.getDisplayName(), et.getText().toString() + postGoodsBean.getUnit());
}
}
}
}
/**
* check whether any content has been filled
*
* @return
*/
private boolean filled() {
return true;
// boolean bRet = false;
//
// for(int i = 0; i < bitmap_url.size(); ++i){
// if(bitmap_url.get(i) != null && bitmap_url.get(i).length() > 0 && initialValueMap.get(bitmap_url.get(i)) == null)
// return true;
// }
//
// extractInputData();
//
// for (int i = 0; i < postList.size(); i++) {
// String key = (String) postList.keySet().toArray()[i];
// PostGoodsBean postGoodsBean = postList.get(key);
//
// String displayName = postGoodsBean.getDisplayName();
//
// String value = postMap.get(displayName);
// String initialValue = initialValueMap.get(displayName)+postGoodsBean.getUnit();
//
// if(initialValue == null || !initialValue.equals(value)){
// bRet = true;
// break;
// }
// }
//
// return bRet;
}
/**
* 检查必填内容是否填写
*
* @return
*/
private boolean check2() {
for (int i = 0; i < postList.size(); i++) {
String key = (String) postList.keySet().toArray()[i];
PostGoodsBean postGoodsBean = postList.get(key);
if (postGoodsBean.getRequired().endsWith("required")) {
if (postGoodsBean.getControlType().equals("select")) {
// TextView obj = (TextView) btMap.get(i);
TextView obj = (TextView) this.getEditMapValue(postGoodsBean.getDisplayName());
if (obj.getText().toString().trim().length() == 0
|| obj.getText().toString().trim().equals("请选择")) {
Toast.makeText(this.getContext(),
"请填写" + postGoodsBean.getDisplayName() + "!", 0)
.show();
return false;
}
} else if (postGoodsBean.getControlType().equals("input")) {
EditText obj = (EditText)getEditMapValue(postGoodsBean.getDisplayName());
// EditText obj = (EditText) btMap.get(i);
if (obj.getText().toString().trim().length() == 0) {
Toast.makeText(this.getContext(),
"请填写" + postGoodsBean.getDisplayName() + "!", 0)
.show();
return false;
}
}
}
}
return true;
}
/**
* 显示拍照相册对话框
*/
private void showDialog() {
View view = LinearLayout.inflate(this.getContext(), R.layout.upload_head, null);
Builder builder = new AlertDialog.Builder(this.getContext());
builder.setView(view);
ad = builder.create();
WindowManager.LayoutParams lp = ad.getWindow().getAttributes();
lp.y = 300;
ad.onWindowAttributesChanged(lp);
ad.show();
photoalbum = (Button) view.findViewById(R.id.photo_album);
photoalbum.setOnClickListener(this);
photomake = (Button) view.findViewById(R.id.photo_make);
photomake.setOnClickListener(this);
photocancle = (Button) view.findViewById(R.id.photo_cancle);
photocancle.setOnClickListener(this);
}
/**
* 发布线程
*
* @author Administrator
*
*/
class UpdateThread implements Runnable {
public void run() {
String apiName = "ad_add";
ArrayList<String> list = new ArrayList<String>();
String city = QuanleimuApplication.getApplication().cityName;
if(goodsDetail != null){
String goodsCity = goodsDetail.getValueByKey(GoodsDetail.EDATAKEYS.EDATAKEYS_AREANAME);
if(null != goodsCity){
String[]cities = goodsCity.split(",");
if(cities != null && cities.length > 0){
city = cities[0];
}
}
}
if(editMap != null && PostGoodsView.this.getEditMapValue("地区") != null){
Object objArea = PostGoodsView.this.getEditMapValue("地区");
String tvText = null;
if(objArea instanceof TextView){
tvText = ((TextView)objArea).getText().toString();
}
else if(objArea instanceof EditText){
tvText = ((EditText)objArea).getText().toString();
}
if(tvText != null && !tvText.equals("")){
city = city + "," + tvText;
}
}
if(editMap != null && PostGoodsView.this.getEditMapValue("具体地点") != null){
Object objArea = PostGoodsView.this.getEditMapValue("具体地点");
String tvText = null;
if(objArea instanceof TextView){
tvText = ((TextView)objArea).getText().toString();
}
else if(objArea instanceof EditText){
tvText = ((EditText)objArea).getText().toString();
}
if(tvText != null && !tvText.equals("")){
city = city + "," + tvText;
}
}
if(!city.equals("")){
String googleUrl = String.format("http://maps.google.com/maps/geo?q=%s&output=csv", city);
try{
String googleJsn = Communication.getDataByUrl(googleUrl);
String[] info = googleJsn.split(",");
if(info != null && info.length == 4){
//goodsDetail.setValueByKey(GoodsDetail.EDATAKEYS.EDATAKEYS_LAT, info[2]);
//goodsDetail.setValueByKey(GoodsDetail.EDATAKEYS.EDATAKEYS_LON, info[3]);
list.add("lat=" + info[2]);
list.add("lng=" + info[3]);
}
}catch(UnsupportedEncodingException e){
e.printStackTrace();
}catch(Exception e){
e.printStackTrace();
}
}
list.add("mobile=" + mobile);
String password1 = Communication.getMD5(password);
password1 += Communication.apiSecret;
String userToken = Communication.getMD5(password1);
list.add("userToken=" + userToken);
list.add("categoryEnglishName=" + categoryEnglishName);
list.add("cityEnglishName=" + QuanleimuApplication.getApplication().cityEnglishName);
list.add("rt=1");
//根据goodsDetail判断是发布还是修改发布
if (goodsDetail != null) {
list.add("adId=" + goodsDetail.getValueByKey(GoodsDetail.EDATAKEYS.EDATAKEYS_ID));
}
//发布发布集合
for (int i = 0; i < postMap.size(); i++) {
String key = (String) postMap.keySet().toArray()[i];
String values = postMap.get(key);
if (values != null && values.length() > 0 && postList.get(key) != null) {
try{
list.add(URLEncoder.encode(postList.get(key).getName(), "UTF-8")
+ "=" + URLEncoder.encode(values, "UTF-8").replaceAll("%7E", "~"));
}catch(UnsupportedEncodingException e){
e.printStackTrace();
}
}
}
//发布图片
for (int i = 0; i < bitmap_url.size(); i++) {
//System.out.println("bitmap_url.keySet().toArray()[i]--?:"+bitmap_url.keySet().toArray()[i]);
if(bitmap_url.get(i) != null && bitmap_url.get(i).contains("http:")){
list.add("image=" + bitmap_url.get(i));
}
}
// list.add("title=" + "111");
// list.add("description=" +
// URLEncoder.encode(descriptionEt.getText().toString()));
String url = Communication.getApiUrl(apiName, list);
try {
json = Communication.getDataByUrl(url);
System.out.println("json---->" + json);
if (json != null) {
JSONObject jsonObject = new JSONObject(json);
JSONObject json = jsonObject.getJSONObject("error");
code = json.getInt("code");
message = json.getString("message");
myHandler.sendEmptyMessage(3);
return;
}
} catch (UnsupportedEncodingException e) {
e.printStackTrace();
} catch (Exception e) {
e.printStackTrace();
}
if(pd != null){
pd.dismiss();
}
((BaseActivity)PostGoodsView.this.getContext()).runOnUiThread(new Runnable(){
@Override
public void run(){
Toast.makeText(PostGoodsView.this.getContext(), "发布失败,请检查内容或网络", 0).show();
}
});
}
}
/**
* 获取模板线程
*/
public int code = -1;
public String message = "";
class GetCategoryMetaThread implements Runnable {
private boolean isUpdate;
private String cityEnglishName = null;
public GetCategoryMetaThread(boolean isUpdate, String cityEnglishName) {
this.cityEnglishName = cityEnglishName;
this.isUpdate = isUpdate;
}
public GetCategoryMetaThread(boolean isUpdate) {
this.isUpdate = isUpdate;
}
@Override
public void run() {
String apiName = "category_meta_post";
ArrayList<String> list = new ArrayList<String>();
this.cityEnglishName = (this.cityEnglishName == null ? QuanleimuApplication.getApplication().cityEnglishName : this.cityEnglishName);
list.add("categoryEnglishName=" + categoryEnglishName);
list.add("cityEnglishName=" + this.cityEnglishName);
String url = Communication.getApiUrl(apiName, list);
try {
json = Communication.getDataByUrl(url);
if (json != null) {
// 获取数据成功
PostMu postMu = new PostMu();
postMu.setJson(json);
postMu.setTime(System.currentTimeMillis());
//保存模板
Helper.saveDataToLocate(PostGoodsView.this.getContext(), categoryEnglishName
+ this.cityEnglishName, postMu);
if (isUpdate) {
myHandler.sendEmptyMessage(1);
}
} else {
// {"error":{"code":0,"message":"\u66f4\u65b0\u4fe1\u606f\u6210\u529f"},"id":"191285466"}
myHandler.sendEmptyMessage(2);
}
} catch (UnsupportedEncodingException e) {
e.printStackTrace();
} catch (IOException e) {
myHandler.sendEmptyMessage(10);
e.printStackTrace();
}
}
}
private Uri uri = null;
@Override
public void onActivityResult(int requestCode, int resultCode, Intent data) {
if (resultCode == NONE) {
return;
}
// 拍照
if (requestCode == PHOTOHRAPH) {
// 设置文件保存路径这里放在跟目录下
File picture = new File(Environment.getExternalStorageDirectory(), "temp" + this.currentImgView + ".jpg");
uri = Uri.fromFile(picture);
getBitmap(uri, PHOTOHRAPH); // 直接返回图片
//startPhotoZoom(uri); //截取图片尺寸
}
if (data == null) {
return;
}
// 读取相册缩放图片
if (requestCode == PHOTOZOOM) {
uri = data.getData();
//startPhotoZoom(uri);
getBitmap(uri, PHOTOZOOM);
}
// 处理结果
if (requestCode == PHOTORESOULT) {
File picture = new File("/sdcard/cropped.jpg");
uri = Uri.fromFile(picture);
getBitmap(uri, PHOTOHRAPH);
File file = new File(Environment.getExternalStorageDirectory(), "temp" + this.currentImgView + "jpg");
try {
if(file.isFile() && file.exists()){
file.delete();
}
} catch (Exception e) {
e.printStackTrace();
}
/*
Bundle extras = data.getExtras();
if (extras != null) {
Bitmap tphoto = extras.getParcelable("data");
ByteArrayOutputStream stream = new ByteArrayOutputStream();
tphoto.compress(Bitmap.CompressFormat.JPEG, 100, stream); // (0 -
// 100)压缩文件
// saveSDCard(photo);
Bitmap photo = Util.newBitmap(tphoto, 480, 480);
imgs[this.currentImgView].setImageBitmap(photo);
imgs[this.currentImgView].setFocusable(true);
tphoto.recycle();
// imgs[bitmap_url.size()].setImageBitmap(photo);
new Thread(new UpLoadThread(photo)).start();
}*/
}
}
private void appendSelectedProperties(String[] lists){
if(lists == null || lists.length == 0) return;
for(int i = 0; i < lists.length; ++ i){
PostGoodsBean bean = this.postList.get(otherProperties.get(Integer.parseInt(lists[i]) - i));
if(bean == null) continue;
this.appendBeanToLayout(bean);
otherProperties.remove(Integer.parseInt(lists[i]) - i);
}
if(otherProperties.size() == 0){
if(layout_txt.getChildCount() > 0){
layout_txt.removeViewAt(layout_txt.getChildCount() - 1);
}
}
else{
if(layout_txt.getChildCount() > 0){
View v = layout_txt.getChildAt(layout_txt.getChildCount() - 1);
if(v != null){
View v2 = v.findViewById(R.id.postshow);
if(v2 != null && v2 instanceof TextView){
((TextView)v2).setText(otherProperties.toString());
}
}
}
}
}
/**
 * Callback invoked when a child selection view returns a result.
 *
 * @param message identifies which picker returned (POST_LIST,
 *                POST_OTHERPROPERTIES, POST_CHECKSELECT, MSG_MULTISEL_BACK,
 *                MSG_CATEGORY_SEL_BACK).
 * @param obj     the picker's payload; its concrete type depends on message.
 */
@Override
public void onPreviousViewBack(int message, Object obj){
    switch(message){
    case POST_LIST:{
        // Single-choice select: obj is the chosen index into the field's labels.
        int id = (Integer)obj;
        TextView tv = tvlist.get(displayname);
        String txt = postList.get(displayname).getLabels().get(id);
        String txtValue = postList.get(displayname).getValues().get(id);
        System.out.println(id+" "+txt+" "+txtValue);
        postMap.put(displayname, txtValue);
        tv.setText(txt);
        break;
    }
    case POST_OTHERPROPERTIES:
        // Optional-properties picker: obj is a comma-separated index list.
        String list = (String)obj;
        if(!list.equals("")){
            String[] lists = list.split(",");
            appendSelectedProperties(lists);
        }
        break;
    case POST_CHECKSELECT:{
        // Multi-checkbox picker: obj is a comma-separated list of selected
        // indices; build display text and submit value in parallel.
        TextView tv = tvlist.get(displayname);
        String check = (String)obj;
        String[] checks = check.split(",");
        String value = "";
        String txt = "";
        for(int i = 0; i < checks.length; ++ i){
            if(checks[i].equals(""))continue;
            txt += "," + postList.get(displayname).getLabels().get(Integer.parseInt(checks[i]));
            value += "," + postList.get(displayname).getValues().get(Integer.parseInt(checks[i]));
        }
        // Strip the leading comma added by the accumulation above.
        if(txt.length() > 0){
            txt = txt.substring(1);
        }
        if(value.length() > 0){
            value = value.substring(1);
        }
        postMap.put(displayname, value);
        tv.setWidth(layout_txt.getWidth() * 2 / 3);
        tv.setText(txt);
        break;
    }
    case MSG_MULTISEL_BACK:{
        // Multi-level picker: obj is the chosen leaf item (display text + id).
        if(obj instanceof MultiLevelSelectionView.MultiLevelItem){
            TextView tv = tvlist.get(displayname);
            if(tv != null){
                tv.setWidth(layout_txt.getWidth() * 2 / 3);
                tv.setText(((MultiLevelSelectionView.MultiLevelItem)obj).txt);
            }
            postMap.put(displayname, ((MultiLevelSelectionView.MultiLevelItem)obj).id);
        }
        break;
    }
    case MSG_CATEGORY_SEL_BACK:{
        // Category changed: reset the whole form state before rebuilding.
        layout_txt.removeAllViews();
        tvlist.clear();
        otherProperties.clear();
        postList.clear();
        postMap.clear();
        initialValueMap.clear();
        editMap.clear();
        bitmap_url.clear();
        // Three empty slots for the image strip.
        bitmap_url.add(null);
        bitmap_url.add(null);
        bitmap_url.add(null);
        listUrl.clear();
        imgs = null;
        currentImgView = -1;
        uploadCount = 0;
        this.addCategoryItem();
        TextView tv = tvlist.get(displayname);
        // obj is "<englishName>,<displayName>".
        String[] backMsg = ((String)obj).split(",");
        if(backMsg == null || backMsg.length != 2) break;
        if(tv != null){
            tv.setText(backMsg[1]);
        }
        this.categoryEnglishName = backMsg[0];
        lastCategoryEnglishName = backMsg[0];
        lastCategoryShowName = backMsg[1];
        this.usercheck();
        break;
    }
    default:
        break;
    }
}
/**
 * Kicks off a background upload for the image identified by {@code uri}
 * and marks the current image slot focusable.
 *
 * @param uri the picked image; nothing happens when null.
 * @param id  tags the request source (PHOTOHRAPH / PHOTOZOOM); not used here.
 */
private void getBitmap(Uri uri, int id) {
    if (uri == null) {
        return;
    }
    String imagePath = uri.toString();
    imgs[this.currentImgView].setFocusable(true);
    new Thread(new UpLoadThread(imagePath, currentImgView)).start();
}
/**
 * Resolves a content:// URI from the gallery to a filesystem path via the
 * MediaStore. The cursor comes from managedQuery(), so its lifecycle is
 * handled by the activity and it must not be closed here.
 *
 * @param contentUri the media URI to resolve.
 * @return the file path, or null when the URI cannot be resolved.
 */
public String getRealPathFromURI(Uri contentUri) {
    String[] proj = { MediaStore.Images.Media.DATA };
    Cursor cursor = baseActivity.managedQuery(contentUri, proj, null, null, null);
    if (cursor == null)
        return null;
    int column_index = cursor
            .getColumnIndexOrThrow(MediaStore.Images.Media.DATA);
    // Guard against an empty result set: the previous code assumed at least
    // one row and crashed with CursorIndexOutOfBoundsException otherwise.
    if (!cursor.moveToFirst())
        return null;
    return cursor.getString(column_index);
}
/**
 * Hands the image off to the system crop activity. The cropped result is
 * written to /sdcard/cropped.jpg and picked up in the activity-result
 * handler under the PHOTORESOULT request code.
 *
 * @param uri the image to crop.
 */
public void startPhotoZoom(Uri uri) {
    Intent cropIntent = new Intent("com.android.camera.action.CROP");
    cropIntent.setDataAndType(uri, IMAGEUNSPECIFIED);
    cropIntent.putExtra("crop", "true");
    // Square crop box (1:1 aspect ratio).
    cropIntent.putExtra("aspectX", 1);
    cropIntent.putExtra("aspectY", 1);
    // Receive the result as a file rather than an inline bitmap.
    cropIntent.putExtra("return-data", false);
    cropIntent.putExtra("output", Uri.fromFile(new File("/sdcard/cropped.jpg")));
    baseActivity.startActivityForResult(cropIntent, PHOTORESOULT);
}
/**
 * Persists the given bitmap as a JPEG under /sdcard/baixing/temp&lt;slot&gt;.jpg,
 * where &lt;slot&gt; is the currently selected image view index.
 *
 * @param photo the bitmap to save; compressed at JPEG quality 100.
 */
public void saveSDCard(Bitmap photo) {
    FileOutputStream outStream = null;
    try {
        String filepath = "/sdcard/baixing";
        File files = new File(filepath);
        // mkdirs() (instead of mkdir()) also creates missing parents; if it
        // fails, the FileOutputStream constructor below reports the problem.
        files.mkdirs();
        File file = new File(filepath, "temp" + this.currentImgView + ".jpg");
        outStream = new FileOutputStream(file);
        String path = file.getAbsolutePath();
        Log.i(path, path);
        photo.compress(CompressFormat.JPEG, 100, outStream);
    } catch (IOException e) {
        // FileNotFoundException is an IOException, so one catch covers both
        // failure modes the original handled separately.
        e.printStackTrace();
    } finally {
        // Close in finally so the stream is not leaked when compress() throws;
        // the original only closed on the success path.
        if (outStream != null) {
            try {
                outStream.close();
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
    }
}
private void appendBeanToLayout(PostGoodsBean postBean){
ViewGroup layout = null;
if (postBean.getControlType().equals("input")) {
LayoutInflater inflater = LayoutInflater.from(PostGoodsView.this.getContext());
View v = inflater.inflate(R.layout.item_post_edit, null);
((TextView)v.findViewById(R.id.postshow)).setText(postBean.getDisplayName());
// btMap.put(position, v.findViewById(R.id.postinput));
editMap.put(postBean.getDisplayName() + " " + postBean.getName(), v.findViewById(R.id.postinput));
if(postBean.getNumeric() != 0){
((EditText)v.findViewById(R.id.postinput)).setInputType(InputType.TYPE_CLASS_NUMBER | InputType.TYPE_NUMBER_FLAG_DECIMAL | InputType.TYPE_NUMBER_FLAG_SIGNED);
}
if (!postBean.getUnit().equals("")) {
((TextView)v.findViewById(R.id.postunit)).setText(postBean.getUnit());
}
layout = (ViewGroup)v;
if(postBean.getName().equals("contact")){
((EditText)v.findViewById(R.id.postinput)).setText(mobile);
}
} else if (postBean.getControlType().equals("select")) {
LayoutInflater inflater = LayoutInflater.from(PostGoodsView.this.getContext());
View v = inflater.inflate(R.layout.item_post_select, null);
((TextView)v.findViewById(R.id.postshow)).setText(postBean.getDisplayName());
tvlist.put(postBean.getDisplayName(), (TextView)v.findViewById(R.id.posthint));
editMap.put(postBean.getDisplayName() + " " + postBean.getName(), v.findViewById(R.id.posthint));
layout = (ViewGroup)v;
}
else if (postBean.getControlType().equals("checkbox")) {
LayoutInflater inflater = LayoutInflater.from(PostGoodsView.this.getContext());
if(postBean.getLabels().size() > 1){
View v = inflater.inflate(R.layout.item_post_select, null);
((TextView)v.findViewById(R.id.postshow)).setText(postBean.getDisplayName());
tvlist.put(postBean.getDisplayName(), (TextView)v.findViewById(R.id.posthint));
editMap.put(postBean.getDisplayName() + " " + postBean.getName(), v.findViewById(R.id.posthint));
layout = (ViewGroup)v;
}
else{
View v = inflater.inflate(R.layout.item_text_checkbox, null);
((TextView)v.findViewById(R.id.checktext)).setText(postBean.getDisplayName());
v.findViewById(R.id.checkitem).setTag(postBean.getDisplayName());
editMap.put(postBean.getDisplayName() + " " + postBean.getName(), v.findViewById(R.id.checkitem));
layout = (ViewGroup)v;
}
} else if (postBean.getControlType().equals("textarea")) {
LayoutInflater inflater = LayoutInflater.from(PostGoodsView.this.getContext());
View v = inflater.inflate(R.layout.item_post_description, null);
((TextView)v.findViewById(R.id.postdescriptionshow)).setText(postBean.getDisplayName());
EditText descriptionEt = (EditText)v.findViewById(R.id.postdescriptioninput);
- descriptionEt.setText(QuanleimuApplication.getApplication().getPersonMark());
+ String personalMark = QuanleimuApplication.getApplication().getPersonMark();
+ if(personalMark != null && personalMark.length() > 0){
+ personalMark = "\n\n" + personalMark;
+ descriptionEt.setText(personalMark);
+ }
// textViewMap.put(position, descriptionEt);
// btMap.put(position, descriptionEt);
editMap.put(postBean.getDisplayName() + " " + postBean.getName(), descriptionEt);
layout = (ViewGroup)v;
} else if (postBean.getControlType().equals("image")) {
layout = new LinearLayout(PostGoodsView.this.getContext());
((LinearLayout)layout).setOrientation(HORIZONTAL);
layout.setPadding(10, 10, 10, 10);
layout.setLayoutParams(new LayoutParams(LayoutParams.FILL_PARENT, LayoutParams.WRAP_CONTENT, 1));
int height = baseActivity.getWindowManager().getDefaultDisplay().getHeight();
int fixHotHeight = height * 15 / 100;
if(fixHotHeight < 50)
{
fixHotHeight = 50;
}
img1 = new ImageView(PostGoodsView.this.getContext());
img2 = new ImageView(PostGoodsView.this.getContext());
img3 = new ImageView(PostGoodsView.this.getContext());
imgs = new ImageView[] { img1, img2, img3 };
//fixHotHeight = layout.getHeight() - 5 * 2;
img1.setLayoutParams(new LinearLayout.LayoutParams(LayoutParams.WRAP_CONTENT, LayoutParams.WRAP_CONTENT));
img1.setAdjustViewBounds(true);
img1.setMaxHeight(fixHotHeight);
img1.setMaxWidth(fixHotHeight);
LinearLayout l1 = new LinearLayout(PostGoodsView.this.getContext());
l1.setLayoutParams(new LinearLayout.LayoutParams(LayoutParams.WRAP_CONTENT, LayoutParams.WRAP_CONTENT, 1));
l1.addView(img1);
img2.setLayoutParams(new LinearLayout.LayoutParams(LayoutParams.WRAP_CONTENT, LayoutParams.WRAP_CONTENT));
img2.setAdjustViewBounds(true);
img2.setMaxHeight(fixHotHeight);
img2.setMaxWidth(fixHotHeight);
LinearLayout l2 = new LinearLayout(PostGoodsView.this.getContext());
l2.setLayoutParams(new LinearLayout.LayoutParams(LayoutParams.WRAP_CONTENT, LayoutParams.WRAP_CONTENT, 1));
l2.addView(img2);
img3.setLayoutParams(new LinearLayout.LayoutParams(LayoutParams.WRAP_CONTENT, LayoutParams.WRAP_CONTENT, 1));
img3.setAdjustViewBounds(true);
img3.setMaxHeight(fixHotHeight);
img3.setMaxWidth(fixHotHeight);
LinearLayout l3 = new LinearLayout(PostGoodsView.this.getContext());
l3.setLayoutParams(new LinearLayout.LayoutParams(LayoutParams.WRAP_CONTENT, LayoutParams.WRAP_CONTENT));
l3.addView(img3);
img1.setImageResource(R.drawable.d);
img2.setImageResource(R.drawable.d);
img3.setImageResource(R.drawable.d);
img1.setOnClickListener(PostGoodsView.this);
img2.setOnClickListener(PostGoodsView.this);
img3.setOnClickListener(PostGoodsView.this);
layout.addView(l1);
layout.addView(l2);
layout.addView(l3);
editMap.put(postBean.getDisplayName() + " " + postBean.getName(), imgs);
}
if(postMap.get(postBean.getDisplayName()) == null)
postMap.put(postBean.getDisplayName(), "");
layout.setTag(postBean);
if(postBean.getControlType().equals("select") || postBean.getControlType().equals("checkbox")){
layout.setOnClickListener(new OnClickListener() {
public void onClick(View v) {
PostGoodsBean postBean = (PostGoodsBean) v.getTag();
TextView txview = tvlist.get(postBean.getDisplayName());
if (postBean.getControlType().equals("select") || postBean.getControlType().equals("tableSelect")) {
displayname = postBean.getDisplayName();
if(m_viewInfoListener != null){
if(postBean.getLevelCount() > 0){
List<MultiLevelSelectionView.MultiLevelItem> items =
new ArrayList<MultiLevelSelectionView.MultiLevelItem>();
for(int i = 0; i < postBean.getLabels().size(); ++ i){
MultiLevelSelectionView.MultiLevelItem t = new MultiLevelSelectionView.MultiLevelItem();
t.txt = postBean.getLabels().get(i);
t.id = postBean.getValues().get(i);
items.add(t);
}
MultiLevelSelectionView nextView =
new MultiLevelSelectionView((BaseActivity)PostGoodsView.this.getContext(), items, MSG_MULTISEL_BACK, postBean.getLevelCount() - 1);
m_viewInfoListener.onNewView(nextView);
}
else{
OtherPropertiesView next = new OtherPropertiesView(baseActivity, postBean.getLabels(), POST_LIST, true);
next.setTitle(postBean.getDisplayName());
if(txview != null){
next.setSelectedItems(txview.getText().toString());
}
m_viewInfoListener.onNewView(next);
}
}
}
else if(postBean.getControlType().equals("checkbox")){
displayname = postBean.getDisplayName();
if(postBean.getLabels().size() > 1){
if(m_viewInfoListener != null){
OtherPropertiesView next = new OtherPropertiesView(baseActivity, postBean.getLabels(), POST_CHECKSELECT, false);
next.setTitle(postBean.getDisplayName());
if(txview != null){
next.setSelectedItems(txview.getText().toString());
}
m_viewInfoListener.onNewView(next);
}
}
else{
View checkV = v.findViewById(R.id.checkitem);
if(checkV != null && checkV instanceof CheckBox){
((CheckBox)checkV).setChecked(!((CheckBox)checkV).isChecked());
if(((CheckBox)checkV).isChecked()){
postMap.put(displayname, postBean.getValues().get(0));
}
else{
postMap.remove(displayname);
}
}
}
}
}
});
}
TextView border = new TextView(PostGoodsView.this.getContext());
border.setLayoutParams(new LayoutParams(
LayoutParams.FILL_PARENT, 1, 1));
border.setBackgroundResource(R.drawable.list_divider);
if(layout_txt.getChildCount() % 2 == 1){
int insertIndex =
layout_txt.getChildCount() >= 3 ? layout_txt.getChildCount() - 3 : layout_txt.getChildCount() - 1;
insertIndex = insertIndex >= 0 ? insertIndex : 0;
layout_txt.addView(layout, insertIndex);
layout_txt.addView(border, insertIndex + 1);
}
else{
layout_txt.addView(layout);
layout_txt.addView(border);
}
}
/**
 * Adds the category selector row to the form. Skipped when editing an
 * existing post or when the row has already been added.
 */
private void addCategoryItem(){
    if (this.goodsDetail != null) return;
    if (tvlist.get("分类") != null) return;
    LayoutInflater inflater = LayoutInflater.from(PostGoodsView.this.getContext());
    View row = inflater.inflate(R.layout.item_post_select, null);
    ((TextView) row.findViewById(R.id.postshow)).setText("分类");
    this.displayname = "分类";
    TextView hint = (TextView) row.findViewById(R.id.posthint);
    tvlist.put(displayname, hint);
    // Tapping the row opens the category selection view.
    row.setOnClickListener(new OnClickListener(){
        @Override
        public void onClick(View v) {
            if (m_viewInfoListener == null) {
                return;
            }
            PostGoodsCateMainView categoryView =
                    new PostGoodsCateMainView((BaseActivity) PostGoodsView.this.getContext(), bundle, MSG_CATEGORY_SEL_BACK);
            m_viewInfoListener.onNewView(categoryView);
        }
    });
    layout_txt.addView(row);
    // Restore the previously chosen category label when re-entering the form.
    boolean hasCategory = categoryEnglishName != null && !categoryEnglishName.equals("");
    if (hasCategory && lastCategoryEnglishName != null && lastCategoryEnglishName.equals(categoryEnglishName)) {
        hint.setText(lastCategoryShowName);
    }
    TextView divider = new TextView(PostGoodsView.this.getContext());
    divider.setLayoutParams(new LayoutParams(
            LayoutParams.FILL_PARENT, 1, 1));
    divider.setBackgroundResource(R.drawable.list_divider);
    layout_txt.addView(divider);
}
/**
 * Builds the posting form from the category template held in {@code json}:
 * mandatory fields (plus the region and image fields) are rendered
 * immediately; other optional fields without a pre-existing value are
 * collected into the "other properties" picker row.
 */
private void buildPostLayout(){
    if (null == json || json.equals("")) return;
    postList = JsonUtil.getPostGoodsBean(json);
    for (Object keyObject : postList.keySet().toArray()) {
        String key = (String) keyObject;
        PostGoodsBean postBean = postList.get(key);
        // The region selector is always shown.
        if (postBean.getName().equals("地区")) {
            this.appendBeanToLayout(postBean);
            continue;
        }
        // The image strip is always shown.
        if (postBean.getName().equals("images")) {
            this.appendBeanToLayout(postBean);
            continue;
        }
        // Defer non-required fields that have no existing value to the
        // "other properties" picker.
        if (!postBean.getRequired().endsWith("required")
                && (goodsDetail == null
                || goodsDetail.getValueByKey(postBean.getName()) == null
                || goodsDetail.getValueByKey(postBean.getName()).equals(""))) {
            otherProperties.add(postBean.getDisplayName());
            continue;
        }
        this.appendBeanToLayout(postBean);
    }
    if (otherProperties.size() > 0) {
        // Trailing row that opens the optional-properties picker.
        LayoutInflater inflater = LayoutInflater.from(PostGoodsView.this.getContext());
        View row = inflater.inflate(R.layout.item_post_select, null);
        TextView show = (TextView) row.findViewById(R.id.postshow);
        show.setText(otherProperties.toString());
        show.setWidth(layout_txt.getWidth() * 2 / 3);
        ((TextView) row.findViewById(R.id.posthint)).setText("非必选");
        row.setOnClickListener(new OnClickListener(){
            @Override
            public void onClick(View v) {
                if (m_viewInfoListener != null) {
                    m_viewInfoListener.onNewView(new OtherPropertiesView(baseActivity, otherProperties, POST_OTHERPROPERTIES, false));
                }
            }
        });
        layout_txt.addView(row);
    }
    editpostUI();
}
// Handler that coordinates worker-thread results on the UI thread
Handler myHandler = new Handler() {
    /**
     * Dispatches worker-thread results: 1 = template loaded, 2 = show a
     * message dialog, 3 = post-submit server response, 10 = network failure.
     */
    @Override
    public void handleMessage(Message msg) {
        // Null-check pd before use: the rest of this method already guards
        // against a null progress dialog, but the original dereferenced it
        // unconditionally here.
        if (pd != null && pd.isShowing()) {
            pd.dismiss();
        }
        switch (msg.what) {
        case 1:
            // Category template loaded: (re)build the form.
            addCategoryItem();
            buildPostLayout();
            break;
        case 2:
            // Show the `message` field in a simple confirmation dialog.
            if (pd != null) {
                pd.dismiss();
            }
            AlertDialog.Builder builder = new AlertDialog.Builder(
                    PostGoodsView.this.getContext());
            builder.setTitle("提示:")
                    .setMessage(message)
                    .setPositiveButton("确定",
                            new DialogInterface.OnClickListener() {
                                @Override
                                public void onClick(DialogInterface dialog,
                                        int which) {
                                    dialog.dismiss();
                                }
                            });
            builder.create().show();
            break;
        case 3:
            // Post submitted: parse the server response held in the `json` field.
            try {
                if (pd != null) {
                    pd.dismiss();
                }
                JSONObject response = new JSONObject(json);
                String id;
                try {
                    id = response.getString("id");
                } catch (Exception e) {
                    // No "id" in the response means the post was not created.
                    id = "";
                    e.printStackTrace();
                }
                // Locals renamed from `json`/`message` so they no longer
                // shadow the same-named fields of the enclosing class.
                JSONObject error = response.getJSONObject("error");
                String errorMessage = error.getString("message");
                Toast.makeText(PostGoodsView.this.getContext(), errorMessage, Toast.LENGTH_SHORT).show();
                if (!id.equals("")) {
                    // Posted successfully — switch to the "mine" tab.
                    if (m_viewInfoListener != null) {
                        m_viewInfoListener.onSwitchToTab(BaseView.ETAB_TYPE.ETAB_TYPE_MINE);
                    }
                }
            } catch (JSONException e) {
                e.printStackTrace();
            }
            break;
        case 4:
            break;
        case 10:
            if (pd != null) {
                pd.dismiss();
            }
            // Toast.LENGTH_LONG replaces the invalid literal duration 3.
            Toast.makeText(PostGoodsView.this.getContext(), "网络连接失败,请检查设置!", Toast.LENGTH_LONG).show();
            break;
        }
        super.handleMessage(msg);
    }
};
/**
 * Background worker that uploads a selected picture to the server and
 * updates the corresponding image slot with the outcome.
 *
 * @author Administrator
 *
 */
class UpLoadThread implements Runnable {
    private String bmpPath;          // URI string of the picked image
    private int currentIndex = -1;   // image slot this upload belongs to
    private Bitmap thumbnailBmp = null; // scaled preview shown on success
    public UpLoadThread(String path, int index) {
        super();
        this.bmpPath = path;
        currentIndex = index;
    }
    public void run() {
        // Show the "uploading" placeholder and disable the slot while busy.
        baseActivity.runOnUiThread(new Runnable(){
            public void run(){
                imgs[currentIndex].setImageResource(R.drawable.u);
                imgs[currentIndex].setClickable(false);
                imgs[currentIndex].invalidate();
            }
        });
        // NOTE(review): this lock is held for the whole decode + network
        // upload, so concurrent uploads are serialized — confirm intended.
        synchronized(PostGoodsView.this){
            ++ uploadCount;
            // NOTE(review): this early return leaves uploadCount incremented
            // with no matching decrement — TODO confirm whether that leaks.
            if(bmpPath == null || bmpPath.equals("")) return;
            Uri uri = Uri.parse(bmpPath);
            String path = getRealPathFromURI(uri); // from Gallery
            if (path == null) {
                path = uri.getPath(); // from File Manager
            }
            Bitmap currentBmp = null;
            if (path != null) {
                try {
                    // First pass decodes bounds only, to pick a sample size
                    // that keeps the larger dimension around maxDim pixels.
                    BitmapFactory.Options bfo = new BitmapFactory.Options();
                    bfo.inJustDecodeBounds = true;
                    BitmapFactory.decodeFile(path, bfo);
                    BitmapFactory.Options o = new BitmapFactory.Options();
                    o.inPurgeable = true;
                    int maxDim = 600;
                    o.inSampleSize = getClosestResampleSize(bfo.outWidth, bfo.outHeight, maxDim);
                    currentBmp = BitmapFactory.decodeFile(path, o);
                } catch (Exception e) {
                    e.printStackTrace();
                }
            }
            if(currentBmp == null) {
                -- uploadCount;
                return;
            }
            String result = Communication.uploadPicture(currentBmp);
            -- uploadCount;
            // Build the preview before recycling the full-size bitmap.
            thumbnailBmp = PostGoodsView.createThumbnail(currentBmp, imgs[currentIndex].getHeight());
            currentBmp.recycle();
            currentBmp = null;
            if (result != null) {
                // Upload succeeded: remember the server URL and show the preview.
                bitmap_url.set(currentIndex, result);
                baseActivity.runOnUiThread(new Runnable(){
                    public void run(){
                        imgs[currentIndex].setImageBitmap(thumbnailBmp);
                        imgs[currentIndex].setClickable(true);
                        imgs[currentIndex].invalidate();
                        Toast.makeText(PostGoodsView.this.getContext(), "上传图片成功", 5).show();
                    }
                });
            } else {
                // Upload failed: show the failure placeholder and keep the
                // local path so the slot can be retried.
                baseActivity.runOnUiThread(new Runnable(){
                    public void run(){
                        imgs[currentIndex].setImageResource(R.drawable.f);
                        imgs[currentIndex].setClickable(true);
                        bitmap_url.set(currentIndex, bmpPath);
                        imgs[currentIndex].invalidate();
                        Toast.makeText(PostGoodsView.this.getContext(), "上传图片失败", 5).show();
                    }
                });
            }
        }
    }
}
/**
 * Computes a BitmapFactory inSampleSize so that the larger of the two image
 * dimensions ends up no larger than roughly {@code maxDim} after decoding.
 *
 * Equivalent to the original linear search for the largest {@code resample}
 * with {@code resample * maxDim <= max(cx, cy)}, but computed directly and
 * safe against a non-positive {@code maxDim} (the old loop could spin up to
 * Integer.MAX_VALUE and return it in that degenerate case).
 *
 * @param cx     image width in pixels.
 * @param cy     image height in pixels.
 * @param maxDim target maximum for the larger dimension.
 * @return a sample size of at least 1.
 */
private static int getClosestResampleSize(int cx, int cy, int maxDim)
{
    if (maxDim <= 0)
    {
        return 1;
    }
    int max = Math.max(cx, cy);
    int resample = max / maxDim;
    return resample > 0 ? resample : 1;
}
/**
 * Scales {@code srcBmp} down to {@code thumbHeight} pixels high, preserving
 * the aspect ratio. Uses primitive float arithmetic instead of the original
 * boxed {@code new Float(...)} objects (deprecated constructor, needless
 * allocation).
 *
 * @param srcBmp      the source bitmap (not recycled here).
 * @param thumbHeight target height in pixels.
 * @return a new scaled bitmap.
 */
static private Bitmap createThumbnail(Bitmap srcBmp, int thumbHeight)
{
    float ratio = (float) srcBmp.getWidth() / (float) srcBmp.getHeight();
    return Bitmap.createScaledBitmap(srcBmp, (int) (thumbHeight * ratio), thumbHeight, true);
}
/**
 * UI-thread task that places a downloaded bitmap into one image slot and
 * re-enables it.
 */
class SetBitmapThread implements Runnable{
    private int slot = -1;   // target position in the image strip
    private Bitmap image;    // bitmap to display
    public SetBitmapThread(int index, Bitmap bmp){
        slot = index;
        image = bmp;
    }
    @Override
    public void run(){
        ImageView target = PostGoodsView.this.imgs[slot];
        target.setImageBitmap(image);
        target.setClickable(true);
    }
}
class Imagethread implements Runnable {
    // Fetches each image in `smalls` and places it into the corresponding
    // image slot; `bigs` holds the matching full-size URLs recorded into
    // bitmap_url.
    private List<String> smalls;   // thumbnail URLs to download
    private List<String> bigs;     // full-size URLs, parallel to `smalls`
    public Imagethread(List<String> smalls, List<String> bigs){
        this.smalls = smalls;
        this.bigs = bigs;
    }
    @Override
    public void run() {
        // NOTE(review): setClickable() is invoked here on a worker thread;
        // View methods are normally main-thread-only — confirm this is safe.
        for(int i = 0; i < smalls.size(); ++ i){
            PostGoodsView.this.imgs[i].setClickable(false);
        }
        for(int t = 0; t < smalls.size(); ++ t){
            try {
                Bitmap tbitmap = Util.getImage(smalls.get(t));
                PostGoodsView.this.bitmap_url.set(t, bigs.get(t));
                // Apply the bitmap on the UI thread.
                baseActivity.runOnUiThread(new SetBitmapThread(t, tbitmap));
            } catch (Exception e) {
                e.printStackTrace();
                // Re-enable the slot so the user can interact with it again.
                PostGoodsView.this.imgs[t].setClickable(true);
            }
        }
    }
}
/**
 * Title-bar configuration for this view: visible, titled "发布", with back
 * ("返回") and publish ("立即发布") actions.
 */
@Override
public TitleDef getTitleDef(){
    TitleDef titleDef = new TitleDef();
    titleDef.m_visible = true;
    titleDef.m_title = "发布";
    titleDef.m_leftActionHint = "返回";
    titleDef.m_rightActionHint = "立即发布";
    return titleDef;
}
/**
 * Tab-bar configuration for this view: the bottom tab bar is hidden while
 * composing a post.
 */
@Override
public TabDef getTabDef(){
    TabDef tabDef = new TabDef();
    tabDef.m_visible = false;
    return tabDef;
}
}
| true | true | private void appendBeanToLayout(PostGoodsBean postBean){
ViewGroup layout = null;
if (postBean.getControlType().equals("input")) {
LayoutInflater inflater = LayoutInflater.from(PostGoodsView.this.getContext());
View v = inflater.inflate(R.layout.item_post_edit, null);
((TextView)v.findViewById(R.id.postshow)).setText(postBean.getDisplayName());
// btMap.put(position, v.findViewById(R.id.postinput));
editMap.put(postBean.getDisplayName() + " " + postBean.getName(), v.findViewById(R.id.postinput));
if(postBean.getNumeric() != 0){
((EditText)v.findViewById(R.id.postinput)).setInputType(InputType.TYPE_CLASS_NUMBER | InputType.TYPE_NUMBER_FLAG_DECIMAL | InputType.TYPE_NUMBER_FLAG_SIGNED);
}
if (!postBean.getUnit().equals("")) {
((TextView)v.findViewById(R.id.postunit)).setText(postBean.getUnit());
}
layout = (ViewGroup)v;
if(postBean.getName().equals("contact")){
((EditText)v.findViewById(R.id.postinput)).setText(mobile);
}
} else if (postBean.getControlType().equals("select")) {
LayoutInflater inflater = LayoutInflater.from(PostGoodsView.this.getContext());
View v = inflater.inflate(R.layout.item_post_select, null);
((TextView)v.findViewById(R.id.postshow)).setText(postBean.getDisplayName());
tvlist.put(postBean.getDisplayName(), (TextView)v.findViewById(R.id.posthint));
editMap.put(postBean.getDisplayName() + " " + postBean.getName(), v.findViewById(R.id.posthint));
layout = (ViewGroup)v;
}
else if (postBean.getControlType().equals("checkbox")) {
LayoutInflater inflater = LayoutInflater.from(PostGoodsView.this.getContext());
if(postBean.getLabels().size() > 1){
View v = inflater.inflate(R.layout.item_post_select, null);
((TextView)v.findViewById(R.id.postshow)).setText(postBean.getDisplayName());
tvlist.put(postBean.getDisplayName(), (TextView)v.findViewById(R.id.posthint));
editMap.put(postBean.getDisplayName() + " " + postBean.getName(), v.findViewById(R.id.posthint));
layout = (ViewGroup)v;
}
else{
View v = inflater.inflate(R.layout.item_text_checkbox, null);
((TextView)v.findViewById(R.id.checktext)).setText(postBean.getDisplayName());
v.findViewById(R.id.checkitem).setTag(postBean.getDisplayName());
editMap.put(postBean.getDisplayName() + " " + postBean.getName(), v.findViewById(R.id.checkitem));
layout = (ViewGroup)v;
}
} else if (postBean.getControlType().equals("textarea")) {
LayoutInflater inflater = LayoutInflater.from(PostGoodsView.this.getContext());
View v = inflater.inflate(R.layout.item_post_description, null);
((TextView)v.findViewById(R.id.postdescriptionshow)).setText(postBean.getDisplayName());
EditText descriptionEt = (EditText)v.findViewById(R.id.postdescriptioninput);
descriptionEt.setText(QuanleimuApplication.getApplication().getPersonMark());
// textViewMap.put(position, descriptionEt);
// btMap.put(position, descriptionEt);
editMap.put(postBean.getDisplayName() + " " + postBean.getName(), descriptionEt);
layout = (ViewGroup)v;
} else if (postBean.getControlType().equals("image")) {
layout = new LinearLayout(PostGoodsView.this.getContext());
((LinearLayout)layout).setOrientation(HORIZONTAL);
layout.setPadding(10, 10, 10, 10);
layout.setLayoutParams(new LayoutParams(LayoutParams.FILL_PARENT, LayoutParams.WRAP_CONTENT, 1));
int height = baseActivity.getWindowManager().getDefaultDisplay().getHeight();
int fixHotHeight = height * 15 / 100;
if(fixHotHeight < 50)
{
fixHotHeight = 50;
}
img1 = new ImageView(PostGoodsView.this.getContext());
img2 = new ImageView(PostGoodsView.this.getContext());
img3 = new ImageView(PostGoodsView.this.getContext());
imgs = new ImageView[] { img1, img2, img3 };
//fixHotHeight = layout.getHeight() - 5 * 2;
img1.setLayoutParams(new LinearLayout.LayoutParams(LayoutParams.WRAP_CONTENT, LayoutParams.WRAP_CONTENT));
img1.setAdjustViewBounds(true);
img1.setMaxHeight(fixHotHeight);
img1.setMaxWidth(fixHotHeight);
LinearLayout l1 = new LinearLayout(PostGoodsView.this.getContext());
l1.setLayoutParams(new LinearLayout.LayoutParams(LayoutParams.WRAP_CONTENT, LayoutParams.WRAP_CONTENT, 1));
l1.addView(img1);
img2.setLayoutParams(new LinearLayout.LayoutParams(LayoutParams.WRAP_CONTENT, LayoutParams.WRAP_CONTENT));
img2.setAdjustViewBounds(true);
img2.setMaxHeight(fixHotHeight);
img2.setMaxWidth(fixHotHeight);
LinearLayout l2 = new LinearLayout(PostGoodsView.this.getContext());
l2.setLayoutParams(new LinearLayout.LayoutParams(LayoutParams.WRAP_CONTENT, LayoutParams.WRAP_CONTENT, 1));
l2.addView(img2);
img3.setLayoutParams(new LinearLayout.LayoutParams(LayoutParams.WRAP_CONTENT, LayoutParams.WRAP_CONTENT, 1));
img3.setAdjustViewBounds(true);
img3.setMaxHeight(fixHotHeight);
img3.setMaxWidth(fixHotHeight);
LinearLayout l3 = new LinearLayout(PostGoodsView.this.getContext());
l3.setLayoutParams(new LinearLayout.LayoutParams(LayoutParams.WRAP_CONTENT, LayoutParams.WRAP_CONTENT));
l3.addView(img3);
img1.setImageResource(R.drawable.d);
img2.setImageResource(R.drawable.d);
img3.setImageResource(R.drawable.d);
img1.setOnClickListener(PostGoodsView.this);
img2.setOnClickListener(PostGoodsView.this);
img3.setOnClickListener(PostGoodsView.this);
layout.addView(l1);
layout.addView(l2);
layout.addView(l3);
editMap.put(postBean.getDisplayName() + " " + postBean.getName(), imgs);
}
if(postMap.get(postBean.getDisplayName()) == null)
postMap.put(postBean.getDisplayName(), "");
layout.setTag(postBean);
if(postBean.getControlType().equals("select") || postBean.getControlType().equals("checkbox")){
layout.setOnClickListener(new OnClickListener() {
public void onClick(View v) {
PostGoodsBean postBean = (PostGoodsBean) v.getTag();
TextView txview = tvlist.get(postBean.getDisplayName());
if (postBean.getControlType().equals("select") || postBean.getControlType().equals("tableSelect")) {
displayname = postBean.getDisplayName();
if(m_viewInfoListener != null){
if(postBean.getLevelCount() > 0){
List<MultiLevelSelectionView.MultiLevelItem> items =
new ArrayList<MultiLevelSelectionView.MultiLevelItem>();
for(int i = 0; i < postBean.getLabels().size(); ++ i){
MultiLevelSelectionView.MultiLevelItem t = new MultiLevelSelectionView.MultiLevelItem();
t.txt = postBean.getLabels().get(i);
t.id = postBean.getValues().get(i);
items.add(t);
}
MultiLevelSelectionView nextView =
new MultiLevelSelectionView((BaseActivity)PostGoodsView.this.getContext(), items, MSG_MULTISEL_BACK, postBean.getLevelCount() - 1);
m_viewInfoListener.onNewView(nextView);
}
else{
OtherPropertiesView next = new OtherPropertiesView(baseActivity, postBean.getLabels(), POST_LIST, true);
next.setTitle(postBean.getDisplayName());
if(txview != null){
next.setSelectedItems(txview.getText().toString());
}
m_viewInfoListener.onNewView(next);
}
}
}
else if(postBean.getControlType().equals("checkbox")){
displayname = postBean.getDisplayName();
if(postBean.getLabels().size() > 1){
if(m_viewInfoListener != null){
OtherPropertiesView next = new OtherPropertiesView(baseActivity, postBean.getLabels(), POST_CHECKSELECT, false);
next.setTitle(postBean.getDisplayName());
if(txview != null){
next.setSelectedItems(txview.getText().toString());
}
m_viewInfoListener.onNewView(next);
}
}
else{
View checkV = v.findViewById(R.id.checkitem);
if(checkV != null && checkV instanceof CheckBox){
((CheckBox)checkV).setChecked(!((CheckBox)checkV).isChecked());
if(((CheckBox)checkV).isChecked()){
postMap.put(displayname, postBean.getValues().get(0));
}
else{
postMap.remove(displayname);
}
}
}
}
}
});
}
TextView border = new TextView(PostGoodsView.this.getContext());
border.setLayoutParams(new LayoutParams(
LayoutParams.FILL_PARENT, 1, 1));
border.setBackgroundResource(R.drawable.list_divider);
if(layout_txt.getChildCount() % 2 == 1){
int insertIndex =
layout_txt.getChildCount() >= 3 ? layout_txt.getChildCount() - 3 : layout_txt.getChildCount() - 1;
insertIndex = insertIndex >= 0 ? insertIndex : 0;
layout_txt.addView(layout, insertIndex);
layout_txt.addView(border, insertIndex + 1);
}
else{
layout_txt.addView(layout);
layout_txt.addView(border);
}
}
| private void appendBeanToLayout(PostGoodsBean postBean){
ViewGroup layout = null;
if (postBean.getControlType().equals("input")) {
LayoutInflater inflater = LayoutInflater.from(PostGoodsView.this.getContext());
View v = inflater.inflate(R.layout.item_post_edit, null);
((TextView)v.findViewById(R.id.postshow)).setText(postBean.getDisplayName());
// btMap.put(position, v.findViewById(R.id.postinput));
editMap.put(postBean.getDisplayName() + " " + postBean.getName(), v.findViewById(R.id.postinput));
if(postBean.getNumeric() != 0){
((EditText)v.findViewById(R.id.postinput)).setInputType(InputType.TYPE_CLASS_NUMBER | InputType.TYPE_NUMBER_FLAG_DECIMAL | InputType.TYPE_NUMBER_FLAG_SIGNED);
}
if (!postBean.getUnit().equals("")) {
((TextView)v.findViewById(R.id.postunit)).setText(postBean.getUnit());
}
layout = (ViewGroup)v;
if(postBean.getName().equals("contact")){
((EditText)v.findViewById(R.id.postinput)).setText(mobile);
}
} else if (postBean.getControlType().equals("select")) {
LayoutInflater inflater = LayoutInflater.from(PostGoodsView.this.getContext());
View v = inflater.inflate(R.layout.item_post_select, null);
((TextView)v.findViewById(R.id.postshow)).setText(postBean.getDisplayName());
tvlist.put(postBean.getDisplayName(), (TextView)v.findViewById(R.id.posthint));
editMap.put(postBean.getDisplayName() + " " + postBean.getName(), v.findViewById(R.id.posthint));
layout = (ViewGroup)v;
}
else if (postBean.getControlType().equals("checkbox")) {
LayoutInflater inflater = LayoutInflater.from(PostGoodsView.this.getContext());
if(postBean.getLabels().size() > 1){
View v = inflater.inflate(R.layout.item_post_select, null);
((TextView)v.findViewById(R.id.postshow)).setText(postBean.getDisplayName());
tvlist.put(postBean.getDisplayName(), (TextView)v.findViewById(R.id.posthint));
editMap.put(postBean.getDisplayName() + " " + postBean.getName(), v.findViewById(R.id.posthint));
layout = (ViewGroup)v;
}
else{
View v = inflater.inflate(R.layout.item_text_checkbox, null);
((TextView)v.findViewById(R.id.checktext)).setText(postBean.getDisplayName());
v.findViewById(R.id.checkitem).setTag(postBean.getDisplayName());
editMap.put(postBean.getDisplayName() + " " + postBean.getName(), v.findViewById(R.id.checkitem));
layout = (ViewGroup)v;
}
} else if (postBean.getControlType().equals("textarea")) {
LayoutInflater inflater = LayoutInflater.from(PostGoodsView.this.getContext());
View v = inflater.inflate(R.layout.item_post_description, null);
((TextView)v.findViewById(R.id.postdescriptionshow)).setText(postBean.getDisplayName());
EditText descriptionEt = (EditText)v.findViewById(R.id.postdescriptioninput);
String personalMark = QuanleimuApplication.getApplication().getPersonMark();
if(personalMark != null && personalMark.length() > 0){
personalMark = "\n\n" + personalMark;
descriptionEt.setText(personalMark);
}
// textViewMap.put(position, descriptionEt);
// btMap.put(position, descriptionEt);
editMap.put(postBean.getDisplayName() + " " + postBean.getName(), descriptionEt);
layout = (ViewGroup)v;
} else if (postBean.getControlType().equals("image")) {
layout = new LinearLayout(PostGoodsView.this.getContext());
((LinearLayout)layout).setOrientation(HORIZONTAL);
layout.setPadding(10, 10, 10, 10);
layout.setLayoutParams(new LayoutParams(LayoutParams.FILL_PARENT, LayoutParams.WRAP_CONTENT, 1));
int height = baseActivity.getWindowManager().getDefaultDisplay().getHeight();
int fixHotHeight = height * 15 / 100;
if(fixHotHeight < 50)
{
fixHotHeight = 50;
}
img1 = new ImageView(PostGoodsView.this.getContext());
img2 = new ImageView(PostGoodsView.this.getContext());
img3 = new ImageView(PostGoodsView.this.getContext());
imgs = new ImageView[] { img1, img2, img3 };
//fixHotHeight = layout.getHeight() - 5 * 2;
img1.setLayoutParams(new LinearLayout.LayoutParams(LayoutParams.WRAP_CONTENT, LayoutParams.WRAP_CONTENT));
img1.setAdjustViewBounds(true);
img1.setMaxHeight(fixHotHeight);
img1.setMaxWidth(fixHotHeight);
LinearLayout l1 = new LinearLayout(PostGoodsView.this.getContext());
l1.setLayoutParams(new LinearLayout.LayoutParams(LayoutParams.WRAP_CONTENT, LayoutParams.WRAP_CONTENT, 1));
l1.addView(img1);
img2.setLayoutParams(new LinearLayout.LayoutParams(LayoutParams.WRAP_CONTENT, LayoutParams.WRAP_CONTENT));
img2.setAdjustViewBounds(true);
img2.setMaxHeight(fixHotHeight);
img2.setMaxWidth(fixHotHeight);
LinearLayout l2 = new LinearLayout(PostGoodsView.this.getContext());
l2.setLayoutParams(new LinearLayout.LayoutParams(LayoutParams.WRAP_CONTENT, LayoutParams.WRAP_CONTENT, 1));
l2.addView(img2);
img3.setLayoutParams(new LinearLayout.LayoutParams(LayoutParams.WRAP_CONTENT, LayoutParams.WRAP_CONTENT, 1));
img3.setAdjustViewBounds(true);
img3.setMaxHeight(fixHotHeight);
img3.setMaxWidth(fixHotHeight);
LinearLayout l3 = new LinearLayout(PostGoodsView.this.getContext());
l3.setLayoutParams(new LinearLayout.LayoutParams(LayoutParams.WRAP_CONTENT, LayoutParams.WRAP_CONTENT));
l3.addView(img3);
img1.setImageResource(R.drawable.d);
img2.setImageResource(R.drawable.d);
img3.setImageResource(R.drawable.d);
img1.setOnClickListener(PostGoodsView.this);
img2.setOnClickListener(PostGoodsView.this);
img3.setOnClickListener(PostGoodsView.this);
layout.addView(l1);
layout.addView(l2);
layout.addView(l3);
editMap.put(postBean.getDisplayName() + " " + postBean.getName(), imgs);
}
if(postMap.get(postBean.getDisplayName()) == null)
postMap.put(postBean.getDisplayName(), "");
layout.setTag(postBean);
if(postBean.getControlType().equals("select") || postBean.getControlType().equals("checkbox")){
layout.setOnClickListener(new OnClickListener() {
public void onClick(View v) {
PostGoodsBean postBean = (PostGoodsBean) v.getTag();
TextView txview = tvlist.get(postBean.getDisplayName());
if (postBean.getControlType().equals("select") || postBean.getControlType().equals("tableSelect")) {
displayname = postBean.getDisplayName();
if(m_viewInfoListener != null){
if(postBean.getLevelCount() > 0){
List<MultiLevelSelectionView.MultiLevelItem> items =
new ArrayList<MultiLevelSelectionView.MultiLevelItem>();
for(int i = 0; i < postBean.getLabels().size(); ++ i){
MultiLevelSelectionView.MultiLevelItem t = new MultiLevelSelectionView.MultiLevelItem();
t.txt = postBean.getLabels().get(i);
t.id = postBean.getValues().get(i);
items.add(t);
}
MultiLevelSelectionView nextView =
new MultiLevelSelectionView((BaseActivity)PostGoodsView.this.getContext(), items, MSG_MULTISEL_BACK, postBean.getLevelCount() - 1);
m_viewInfoListener.onNewView(nextView);
}
else{
OtherPropertiesView next = new OtherPropertiesView(baseActivity, postBean.getLabels(), POST_LIST, true);
next.setTitle(postBean.getDisplayName());
if(txview != null){
next.setSelectedItems(txview.getText().toString());
}
m_viewInfoListener.onNewView(next);
}
}
}
else if(postBean.getControlType().equals("checkbox")){
displayname = postBean.getDisplayName();
if(postBean.getLabels().size() > 1){
if(m_viewInfoListener != null){
OtherPropertiesView next = new OtherPropertiesView(baseActivity, postBean.getLabels(), POST_CHECKSELECT, false);
next.setTitle(postBean.getDisplayName());
if(txview != null){
next.setSelectedItems(txview.getText().toString());
}
m_viewInfoListener.onNewView(next);
}
}
else{
View checkV = v.findViewById(R.id.checkitem);
if(checkV != null && checkV instanceof CheckBox){
((CheckBox)checkV).setChecked(!((CheckBox)checkV).isChecked());
if(((CheckBox)checkV).isChecked()){
postMap.put(displayname, postBean.getValues().get(0));
}
else{
postMap.remove(displayname);
}
}
}
}
}
});
}
TextView border = new TextView(PostGoodsView.this.getContext());
border.setLayoutParams(new LayoutParams(
LayoutParams.FILL_PARENT, 1, 1));
border.setBackgroundResource(R.drawable.list_divider);
if(layout_txt.getChildCount() % 2 == 1){
int insertIndex =
layout_txt.getChildCount() >= 3 ? layout_txt.getChildCount() - 3 : layout_txt.getChildCount() - 1;
insertIndex = insertIndex >= 0 ? insertIndex : 0;
layout_txt.addView(layout, insertIndex);
layout_txt.addView(border, insertIndex + 1);
}
else{
layout_txt.addView(layout);
layout_txt.addView(border);
}
}
|
diff --git a/plugins/org.eclipse.birt.core/src/org/eclipse/birt/core/internal/function/impl/FunctionProviderImpl.java b/plugins/org.eclipse.birt.core/src/org/eclipse/birt/core/internal/function/impl/FunctionProviderImpl.java
index 260c5e6..4d0d8ae 100644
--- a/plugins/org.eclipse.birt.core/src/org/eclipse/birt/core/internal/function/impl/FunctionProviderImpl.java
+++ b/plugins/org.eclipse.birt.core/src/org/eclipse/birt/core/internal/function/impl/FunctionProviderImpl.java
@@ -1,413 +1,413 @@
/*******************************************************************************
* Copyright (c) 2004, 2008 Actuate Corporation.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* Actuate Corporation - initial API and implementation
*******************************************************************************/
package org.eclipse.birt.core.internal.function.impl;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.net.URL;
import java.util.ArrayList;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.eclipse.birt.core.data.DataTypeUtil;
import org.eclipse.birt.core.exception.BirtException;
import org.eclipse.birt.core.framework.IConfigurationElement;
import org.eclipse.birt.core.framework.IExtension;
import org.eclipse.birt.core.framework.IExtensionPoint;
import org.eclipse.birt.core.framework.IExtensionRegistry;
import org.eclipse.birt.core.framework.Platform;
import org.eclipse.birt.core.script.functionservice.IScriptFunction;
import org.eclipse.birt.core.script.functionservice.IScriptFunctionArgument;
import org.eclipse.birt.core.script.functionservice.IScriptFunctionCategory;
import org.eclipse.birt.core.script.functionservice.IScriptFunctionFactory;
import org.eclipse.birt.core.script.functionservice.impl.Argument;
import org.eclipse.birt.core.script.functionservice.impl.Category;
import org.eclipse.birt.core.script.functionservice.impl.CategoryWrapper;
import org.eclipse.birt.core.script.functionservice.impl.IFunctionProvider;
import org.eclipse.birt.core.script.functionservice.impl.ScriptFunction;
import org.mozilla.javascript.Context;
import org.mozilla.javascript.Script;
import org.mozilla.javascript.Scriptable;
import org.mozilla.javascript.ScriptableObject;
import org.osgi.framework.Bundle;
/**
*
*/
public class FunctionProviderImpl implements IFunctionProvider
{
//The extension constants
private static final String EXTENSION_POINT = "org.eclipse.birt.core.ScriptFunctionService";
private static final String ELEMENT_CATEGORY = "Category";
private static final String ELEMENT_FUNCTION = "Function";
private static final String ELEMENT_ARGUMENT = "Argument";
private static final String ELEMENT_JSLIB = "JSLib";
private static final String ELEMENT_DATATYPE = "DataType";
private static final String ATTRIBUTE_NAME = "name";
private static final String ATTRIBUTE_DESC = "desc";
private static final String ATTRIBUTE_FACTORYCLASS = "factoryclass";
private static final String ATTRIBUTE_VALUE = "value";
private static final String ATTRIBUTE_ISOPTIONAL = "isOptional";
private static final String ATTRIBUTE_ALLOWVARARGUMENT = "variableArguments";
private static final String ATTRIBUTE_ISSTATIC="isStatic";
private static final String ATTRIBUTE_ISCONSTRUCTOR="isConstructor";
private static final String ATTRIBUTE_LOCATION = "location";
private static final String ATTRIBUTE_ISVISIBLE="isVisible";
private static final String DEFAULT_CATEGORYNAME = null;
private Map<String, Category> categories;
private List<URL> jsLibs = new ArrayList<URL>( );
/**
* Return all the categories defined by extensions.
*
* @return
* @throws BirtException
*/
public IScriptFunctionCategory[] getCategories( )
throws BirtException
{
return getCategoryMap( ).values( )
.toArray( new IScriptFunctionCategory[]{} );
}
/**
* Return the functions that defined in a category.
*
* @param categoryName
* @return
* @throws BirtException
*/
public IScriptFunction[] getFunctions( String categoryName )
throws BirtException
{
if ( getCategoryMap( ).containsKey( categoryName ) )
{
Category category = getCategoryMap( ).get( categoryName );
return category.getFunctions( );
}
return new IScriptFunction[0];
}
/**
* Register script functions to scope.
*
* @param cx
* @param scope
* @throws BirtException
*/
public void registerScriptFunction( Context cx, Scriptable scope )
throws BirtException
{
List<CategoryWrapper> wrapperedCategories = getWrapperedCategories( );
for ( CategoryWrapper category : wrapperedCategories )
{
ScriptableObject.putProperty( scope,
category.getClassName( ),
category );
}
for ( URL url : jsLibs )
{
Script script;
try
{
script = cx.compileReader( new BufferedReader( new InputStreamReader( url.openStream( ) ) ),
null,
0,
null );
script.exec( cx, scope );
}
catch ( IOException e )
{
}
}
}
/**
* Return the category map.
*
* @return
*/
- private Map<String, Category> getCategoryMap( )
+ private synchronized Map<String, Category> getCategoryMap( )
{
if ( categories != null )
return categories;
categories = new HashMap<String, Category>( );
//Find the extension point.
IExtensionRegistry extReg = Platform.getExtensionRegistry( );
IExtensionPoint extPoint = extReg.getExtensionPoint( EXTENSION_POINT );
if ( extPoint == null )
return categories;
//Fetch all extensions
IExtension[] exts = extPoint.getExtensions( );
if ( exts == null )
{
return categories;
}
//populate category map as per extension.
for ( int e = 0; e < exts.length; e++ )
{
try
{
IConfigurationElement[] configElems = exts[e].getConfigurationElements( );
if ( configElems == null )
continue;
for ( int i = 0; i < configElems.length; i++ )
{
boolean isVisible = extractBoolean( configElems[i].getAttribute( ATTRIBUTE_ISVISIBLE ),
true );
// for element Category
if ( configElems[i].getName( ).equals( ELEMENT_CATEGORY ) )
{
Category category = new Category( configElems[i].getAttribute( ATTRIBUTE_NAME ),
configElems[i].getAttribute( ATTRIBUTE_DESC ),
isVisible );
categories.put( category.getName( ), category );
IScriptFunctionFactory factory = null;
if ( configElems[i].getAttribute( ATTRIBUTE_FACTORYCLASS ) != null )
factory = (IScriptFunctionFactory) configElems[i].createExecutableExtension( ATTRIBUTE_FACTORYCLASS );
IConfigurationElement[] functions = configElems[i].getChildren( ELEMENT_FUNCTION );
for ( int j = 0; j < functions.length; j++ )
{
IScriptFunction function = getScriptFunction( category,
factory,
functions[j] );
if ( function != null )
category.addFunction( function );
}
}
// For element function that are not under certain category.
// Usually those functions are
// defined in .js file
else if ( configElems[i].getName( )
.equals( ELEMENT_FUNCTION ) )
{
if ( categories.get( DEFAULT_CATEGORYNAME ) == null )
{
categories.put( DEFAULT_CATEGORYNAME,
new Category( DEFAULT_CATEGORYNAME,
null,
isVisible ) );
}
IScriptFunction function = getScriptFunction( categories.get( DEFAULT_CATEGORYNAME ),
null,
configElems[i] );
if ( function != null )
categories.get( DEFAULT_CATEGORYNAME )
.addFunction( function );
}
// Populate the .js script library
else if ( configElems[i].getName( ).equals( ELEMENT_JSLIB ) )
{
populateResources( jsLibs, ".js", configElems[i] );
}
}
}
catch ( BirtException ex )
{
ex.printStackTrace( );
}
}
return categories;
}
/**
* Populate library resources. The library resources includes .js script lib and .jar java lib.
* @param libs
* @param suffix
* @param confElement
*/
private static void populateResources( List<URL> libs, String suffix,
IConfigurationElement confElement )
{
String source = confElement.getAttribute( ATTRIBUTE_LOCATION );
IExtension extension = confElement.getDeclaringExtension( );
String namespace = extension.getNamespace( );
Bundle bundle = org.eclipse.core.runtime.Platform.getBundle( namespace );
if ( bundle != null )
{
Enumeration<String> files = bundle.getEntryPaths( source );
if ( files != null )
{
// In this case, the bundle denotes to a directory.
while ( files.hasMoreElements( ) )
{
String filePath = files.nextElement( );
if ( filePath.toLowerCase( ).endsWith( suffix ) )
{
URL url = bundle.getEntry( filePath );
if ( url != null )
{
libs.add( url );
}
}
}
}
else
{
// the bundle denotes to a file.
if ( source.toLowerCase( ).endsWith( suffix ) )
{
URL url = bundle.getEntry( source );
if ( url != null )
{
libs.add( url );
}
}
}
}
}
/**
* Create script function out of a function element.
* @param category
* @param factory
* @param function
* @return
*/
private static IScriptFunction getScriptFunction( Category category,
IScriptFunctionFactory factory, IConfigurationElement function )
{
try
{
//Function name
String name = function.getAttribute( ATTRIBUTE_NAME );
//Function Desc
String desc = function.getAttribute( ATTRIBUTE_DESC );
//Allow var argument
String varArgs = function.getAttribute( ATTRIBUTE_ALLOWVARARGUMENT );
boolean allowVarArgs = extractBoolean( varArgs, false );
boolean isConstructor = extractBoolean( function.getAttribute( ATTRIBUTE_ISCONSTRUCTOR ), false);
boolean isStatic = extractBoolean( function.getAttribute( ATTRIBUTE_ISSTATIC ), true);
boolean isVisible = extractBoolean( function.getAttribute( ATTRIBUTE_ISVISIBLE ), true);
String dataType = null;
List<IScriptFunctionArgument> arguments = new ArrayList<IScriptFunctionArgument>( );
//Populate function return data type info.
if ( hasChildren( ELEMENT_DATATYPE, function ) )
{
dataType = function.getChildren( ELEMENT_DATATYPE )[0].getAttribute( ATTRIBUTE_VALUE );
}
//Popualte function argument info
if ( hasChildren( ELEMENT_ARGUMENT, function ) )
{
for ( int i = 0; i < function.getChildren( ELEMENT_ARGUMENT ).length; i++ )
{
arguments.add( getScriptFunctionArgument( function.getChildren( ELEMENT_ARGUMENT )[i] ) );
}
}
return new ScriptFunction( name,
category,
arguments.toArray( new IScriptFunctionArgument[0] ),
dataType,
desc,
factory == null ? null : factory.getFunctionExecutor( name ),
allowVarArgs,
isStatic,
isConstructor,
isVisible );
}
catch ( Exception e )
{
return null;
}
}
private static boolean extractBoolean( String strValue, boolean ifNull )
throws BirtException
{
boolean booleanValue = strValue == null ? ifNull
: DataTypeUtil.toBoolean( strValue );
return booleanValue;
}
/**
* Populate function argument.
* @param argument
* @return
* @throws BirtException
*/
private static IScriptFunctionArgument getScriptFunctionArgument(
IConfigurationElement argument ) throws BirtException
{
//
String name = argument.getAttribute( ATTRIBUTE_NAME );
String desc = argument.getAttribute( ATTRIBUTE_DESC );
//populate whether it is optional argument.
String optional = argument.getAttribute( ATTRIBUTE_ISOPTIONAL );
boolean isOptional = extractBoolean( optional, false );
String dataType = null;
//Populate data type
if ( hasChildren( ELEMENT_DATATYPE, argument ) )
{
dataType = argument.getChildren( ELEMENT_DATATYPE )[0].getAttribute( ATTRIBUTE_VALUE );
}
return new Argument( name,
dataType,
desc,
isOptional );
}
/**
*
* @param name
* @param element
* @return
*/
private static boolean hasChildren( String name,
IConfigurationElement element )
{
IConfigurationElement[] children = element.getChildren( name );
return children != null && children.length > 0;
}
/**
* Create category wrapper.
*
* @return
* @throws BirtException
*/
private List<CategoryWrapper> getWrapperedCategories( )
throws BirtException
{
List<CategoryWrapper> result = new ArrayList<CategoryWrapper>( );
for ( Category category : getCategoryMap( ).values( ) )
{
if ( category.getName( ) != DEFAULT_CATEGORYNAME )
result.add( new CategoryWrapper( category ) );
}
return result;
}
}
| true | true | private Map<String, Category> getCategoryMap( )
{
if ( categories != null )
return categories;
categories = new HashMap<String, Category>( );
//Find the extension point.
IExtensionRegistry extReg = Platform.getExtensionRegistry( );
IExtensionPoint extPoint = extReg.getExtensionPoint( EXTENSION_POINT );
if ( extPoint == null )
return categories;
//Fetch all extensions
IExtension[] exts = extPoint.getExtensions( );
if ( exts == null )
{
return categories;
}
//populate category map as per extension.
for ( int e = 0; e < exts.length; e++ )
{
try
{
IConfigurationElement[] configElems = exts[e].getConfigurationElements( );
if ( configElems == null )
continue;
for ( int i = 0; i < configElems.length; i++ )
{
boolean isVisible = extractBoolean( configElems[i].getAttribute( ATTRIBUTE_ISVISIBLE ),
true );
// for element Category
if ( configElems[i].getName( ).equals( ELEMENT_CATEGORY ) )
{
Category category = new Category( configElems[i].getAttribute( ATTRIBUTE_NAME ),
configElems[i].getAttribute( ATTRIBUTE_DESC ),
isVisible );
categories.put( category.getName( ), category );
IScriptFunctionFactory factory = null;
if ( configElems[i].getAttribute( ATTRIBUTE_FACTORYCLASS ) != null )
factory = (IScriptFunctionFactory) configElems[i].createExecutableExtension( ATTRIBUTE_FACTORYCLASS );
IConfigurationElement[] functions = configElems[i].getChildren( ELEMENT_FUNCTION );
for ( int j = 0; j < functions.length; j++ )
{
IScriptFunction function = getScriptFunction( category,
factory,
functions[j] );
if ( function != null )
category.addFunction( function );
}
}
// For element function that are not under certain category.
// Usually those functions are
// defined in .js file
else if ( configElems[i].getName( )
.equals( ELEMENT_FUNCTION ) )
{
if ( categories.get( DEFAULT_CATEGORYNAME ) == null )
{
categories.put( DEFAULT_CATEGORYNAME,
new Category( DEFAULT_CATEGORYNAME,
null,
isVisible ) );
}
IScriptFunction function = getScriptFunction( categories.get( DEFAULT_CATEGORYNAME ),
null,
configElems[i] );
if ( function != null )
categories.get( DEFAULT_CATEGORYNAME )
.addFunction( function );
}
// Populate the .js script library
else if ( configElems[i].getName( ).equals( ELEMENT_JSLIB ) )
{
populateResources( jsLibs, ".js", configElems[i] );
}
}
}
catch ( BirtException ex )
{
ex.printStackTrace( );
}
}
return categories;
}
| private synchronized Map<String, Category> getCategoryMap( )
{
if ( categories != null )
return categories;
categories = new HashMap<String, Category>( );
//Find the extension point.
IExtensionRegistry extReg = Platform.getExtensionRegistry( );
IExtensionPoint extPoint = extReg.getExtensionPoint( EXTENSION_POINT );
if ( extPoint == null )
return categories;
//Fetch all extensions
IExtension[] exts = extPoint.getExtensions( );
if ( exts == null )
{
return categories;
}
//populate category map as per extension.
for ( int e = 0; e < exts.length; e++ )
{
try
{
IConfigurationElement[] configElems = exts[e].getConfigurationElements( );
if ( configElems == null )
continue;
for ( int i = 0; i < configElems.length; i++ )
{
boolean isVisible = extractBoolean( configElems[i].getAttribute( ATTRIBUTE_ISVISIBLE ),
true );
// for element Category
if ( configElems[i].getName( ).equals( ELEMENT_CATEGORY ) )
{
Category category = new Category( configElems[i].getAttribute( ATTRIBUTE_NAME ),
configElems[i].getAttribute( ATTRIBUTE_DESC ),
isVisible );
categories.put( category.getName( ), category );
IScriptFunctionFactory factory = null;
if ( configElems[i].getAttribute( ATTRIBUTE_FACTORYCLASS ) != null )
factory = (IScriptFunctionFactory) configElems[i].createExecutableExtension( ATTRIBUTE_FACTORYCLASS );
IConfigurationElement[] functions = configElems[i].getChildren( ELEMENT_FUNCTION );
for ( int j = 0; j < functions.length; j++ )
{
IScriptFunction function = getScriptFunction( category,
factory,
functions[j] );
if ( function != null )
category.addFunction( function );
}
}
// For element function that are not under certain category.
// Usually those functions are
// defined in .js file
else if ( configElems[i].getName( )
.equals( ELEMENT_FUNCTION ) )
{
if ( categories.get( DEFAULT_CATEGORYNAME ) == null )
{
categories.put( DEFAULT_CATEGORYNAME,
new Category( DEFAULT_CATEGORYNAME,
null,
isVisible ) );
}
IScriptFunction function = getScriptFunction( categories.get( DEFAULT_CATEGORYNAME ),
null,
configElems[i] );
if ( function != null )
categories.get( DEFAULT_CATEGORYNAME )
.addFunction( function );
}
// Populate the .js script library
else if ( configElems[i].getName( ).equals( ELEMENT_JSLIB ) )
{
populateResources( jsLibs, ".js", configElems[i] );
}
}
}
catch ( BirtException ex )
{
ex.printStackTrace( );
}
}
return categories;
}
|
diff --git a/lucene/spatial/src/test/org/apache/lucene/spatial/prefix/TestRecursivePrefixTreeStrategy.java b/lucene/spatial/src/test/org/apache/lucene/spatial/prefix/TestRecursivePrefixTreeStrategy.java
index 971465dedb..fe276e6fff 100644
--- a/lucene/spatial/src/test/org/apache/lucene/spatial/prefix/TestRecursivePrefixTreeStrategy.java
+++ b/lucene/spatial/src/test/org/apache/lucene/spatial/prefix/TestRecursivePrefixTreeStrategy.java
@@ -1,217 +1,217 @@
package org.apache.lucene.spatial.prefix;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import com.spatial4j.core.context.SpatialContext;
import com.spatial4j.core.distance.DistanceUtils;
import com.spatial4j.core.io.GeohashUtils;
import com.spatial4j.core.shape.Point;
import com.spatial4j.core.shape.Rectangle;
import com.spatial4j.core.shape.Shape;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.StoredField;
import org.apache.lucene.document.StringField;
import org.apache.lucene.index.IndexableField;
import org.apache.lucene.spatial.SpatialMatchConcern;
import org.apache.lucene.spatial.StrategyTestCase;
import org.apache.lucene.spatial.prefix.tree.GeohashPrefixTree;
import org.apache.lucene.spatial.query.SpatialArgs;
import org.apache.lucene.spatial.query.SpatialOperation;
import org.junit.Test;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
public class TestRecursivePrefixTreeStrategy extends StrategyTestCase {
private int maxLength;
//Tests should call this first.
private void init(int maxLength) {
this.maxLength = maxLength;
this.ctx = SpatialContext.GEO;
GeohashPrefixTree grid = new GeohashPrefixTree(ctx, maxLength);
this.strategy = new RecursivePrefixTreeStrategy(grid, getClass().getSimpleName());
}
@Test
public void testFilterWithVariableScanLevel() throws IOException {
init(GeohashPrefixTree.getMaxLevelsPossible());
getAddAndVerifyIndexedDocuments(DATA_WORLD_CITIES_POINTS);
//execute queries for each prefix grid scan level
for(int i = 0; i <= maxLength; i++) {
((RecursivePrefixTreeStrategy)strategy).setPrefixGridScanLevel(i);
executeQueries(SpatialMatchConcern.FILTER, QTEST_Cities_IsWithin_BBox);
}
}
@Test
public void testOneMeterPrecision() {
init(GeohashPrefixTree.getMaxLevelsPossible());
GeohashPrefixTree grid = (GeohashPrefixTree) ((RecursivePrefixTreeStrategy) strategy).getGrid();
//DWS: I know this to be true. 11 is needed for one meter
double degrees = DistanceUtils.dist2Degrees(0.001, DistanceUtils.EARTH_MEAN_RADIUS_KM);
assertEquals(11, grid.getLevelForDistance(degrees));
}
@Test
public void testPrecision() throws IOException{
init(GeohashPrefixTree.getMaxLevelsPossible());
Point iPt = ctx.makePoint(2.8028712999999925, 48.3708044);//lon, lat
addDocument(newDoc("iPt", iPt));
commit();
Point qPt = ctx.makePoint(2.4632387000000335, 48.6003516);
final double KM2DEG = DistanceUtils.dist2Degrees(1, DistanceUtils.EARTH_MEAN_RADIUS_KM);
final double DEG2KM = 1 / KM2DEG;
final double DIST = 35.75;//35.7499...
assertEquals(DIST, ctx.getDistCalc().distance(iPt, qPt) * DEG2KM, 0.001);
//distErrPct will affect the query shape precision. The indexed precision
// was set to nearly zilch via init(GeohashPrefixTree.getMaxLevelsPossible());
final double distErrPct = 0.025; //the suggested default, by the way
final double distMult = 1+distErrPct;
assertTrue(35.74*distMult >= DIST);
checkHits(q(qPt, 35.74 * KM2DEG, distErrPct), 1, null);
assertTrue(30*distMult < DIST);
checkHits(q(qPt, 30 * KM2DEG, distErrPct), 0, null);
assertTrue(33*distMult < DIST);
checkHits(q(qPt, 33 * KM2DEG, distErrPct), 0, null);
assertTrue(34*distMult < DIST);
checkHits(q(qPt, 34 * KM2DEG, distErrPct), 0, null);
}
@Test
public void geohashRecursiveRandom() throws IOException {
init(12);
//1. Iterate test with the cluster at some worldly point of interest
Point[] clusterCenters = new Point[]{ctx.makePoint(-180,0), ctx.makePoint(0,90), ctx.makePoint(0,-90)};
for (Point clusterCenter : clusterCenters) {
//2. Iterate on size of cluster (a really small one and a large one)
String hashCenter = GeohashUtils.encodeLatLon(clusterCenter.getY(), clusterCenter.getX(), maxLength);
//calculate the number of degrees in the smallest grid box size (use for both lat & lon)
String smallBox = hashCenter.substring(0,hashCenter.length()-1);//chop off leaf precision
Rectangle clusterDims = GeohashUtils.decodeBoundary(smallBox,ctx);
double smallRadius = Math.max(clusterDims.getMaxX()-clusterDims.getMinX(),clusterDims.getMaxY()-clusterDims.getMinY());
assert smallRadius < 1;
double largeRadius = 20d;//good large size; don't use >=45 for this test code to work
double[] radiusDegs = {largeRadius,smallRadius};
for (double radiusDeg : radiusDegs) {
//3. Index random points in this cluster circle
deleteAll();
List<Point> points = new ArrayList<Point>();
for(int i = 0; i < 20; i++) {
//Note that this will not result in randomly distributed points in the
// circle, they will be concentrated towards the center a little. But
// it's good enough.
Point pt = ctx.getDistCalc().pointOnBearing(clusterCenter,
random().nextDouble() * radiusDeg, random().nextInt() * 360, ctx, null);
pt = alignGeohash(pt);
points.add(pt);
addDocument(newDoc("" + i, pt));
}
commit();
//3. Use some query centers. Each is twice the cluster's radius away.
for(int ri = 0; ri < 4; ri++) {
Point queryCenter = ctx.getDistCalc().pointOnBearing(clusterCenter,
radiusDeg*2, random().nextInt(360), ctx, null);
queryCenter = alignGeohash(queryCenter);
//4.1 Query a small box getting nothing
checkHits(q(queryCenter, radiusDeg - smallRadius/2), 0, null);
//4.2 Query a large box enclosing the cluster, getting everything
- checkHits(q(queryCenter, radiusDeg*3*1.01), points.size(), null);
+ checkHits(q(queryCenter, radiusDeg*3 + smallRadius/2), points.size(), null);
//4.3 Query a medium box getting some (calculate the correct solution and verify)
double queryDist = radiusDeg * 2;
//Find matching points. Put into int[] of doc ids which is the same thing as the index into points list.
int[] ids = new int[points.size()];
int ids_sz = 0;
for (int i = 0; i < points.size(); i++) {
Point point = points.get(i);
if (ctx.getDistCalc().distance(queryCenter, point) <= queryDist)
ids[ids_sz++] = i;
}
ids = Arrays.copyOf(ids, ids_sz);
//assert ids_sz > 0 (can't because randomness keeps us from being able to)
checkHits(q(queryCenter, queryDist), ids.length, ids);
}
}//for radiusDeg
}//for clusterCenter
}//randomTest()
/** Query point-distance (in degrees) with zero error percent. */
private SpatialArgs q(Point pt, double distDEG) {
return q(pt, distDEG, 0.0);
}
private SpatialArgs q(Point pt, double distDEG, double distErrPct) {
Shape shape = ctx.makeCircle(pt, distDEG);
SpatialArgs args = new SpatialArgs(SpatialOperation.Intersects,shape);
args.setDistErrPct(distErrPct);
return args;
}
private void checkHits(SpatialArgs args, int assertNumFound, int[] assertIds) {
SearchResults got = executeQuery(strategy.makeQuery(args), 100);
assertEquals("" + args, assertNumFound, got.numFound);
if (assertIds != null) {
Set<Integer> gotIds = new HashSet<Integer>();
for (SearchResult result : got.results) {
gotIds.add(Integer.valueOf(result.document.get("id")));
}
for (int assertId : assertIds) {
assertTrue("has "+assertId,gotIds.contains(assertId));
}
}
}
private Document newDoc(String id, Shape shape) {
Document doc = new Document();
doc.add(new StringField("id", id, Field.Store.YES));
for (IndexableField f : strategy.createIndexableFields(shape)) {
doc.add(f);
}
if (storeShape)
doc.add(new StoredField(strategy.getFieldName(), ctx.toString(shape)));
return doc;
}
/** NGeohash round-trip for given precision. */
private Point alignGeohash(Point p) {
return GeohashUtils.decode(GeohashUtils.encodeLatLon(p.getY(), p.getX(), maxLength), ctx);
}
}
| true | true | public void geohashRecursiveRandom() throws IOException {
init(12);
//1. Iterate test with the cluster at some worldly point of interest
Point[] clusterCenters = new Point[]{ctx.makePoint(-180,0), ctx.makePoint(0,90), ctx.makePoint(0,-90)};
for (Point clusterCenter : clusterCenters) {
//2. Iterate on size of cluster (a really small one and a large one)
String hashCenter = GeohashUtils.encodeLatLon(clusterCenter.getY(), clusterCenter.getX(), maxLength);
//calculate the number of degrees in the smallest grid box size (use for both lat & lon)
String smallBox = hashCenter.substring(0,hashCenter.length()-1);//chop off leaf precision
Rectangle clusterDims = GeohashUtils.decodeBoundary(smallBox,ctx);
double smallRadius = Math.max(clusterDims.getMaxX()-clusterDims.getMinX(),clusterDims.getMaxY()-clusterDims.getMinY());
assert smallRadius < 1;
double largeRadius = 20d;//good large size; don't use >=45 for this test code to work
double[] radiusDegs = {largeRadius,smallRadius};
for (double radiusDeg : radiusDegs) {
//3. Index random points in this cluster circle
deleteAll();
List<Point> points = new ArrayList<Point>();
for(int i = 0; i < 20; i++) {
//Note that this will not result in randomly distributed points in the
// circle, they will be concentrated towards the center a little. But
// it's good enough.
Point pt = ctx.getDistCalc().pointOnBearing(clusterCenter,
random().nextDouble() * radiusDeg, random().nextInt() * 360, ctx, null);
pt = alignGeohash(pt);
points.add(pt);
addDocument(newDoc("" + i, pt));
}
commit();
//3. Use some query centers. Each is twice the cluster's radius away.
for(int ri = 0; ri < 4; ri++) {
Point queryCenter = ctx.getDistCalc().pointOnBearing(clusterCenter,
radiusDeg*2, random().nextInt(360), ctx, null);
queryCenter = alignGeohash(queryCenter);
//4.1 Query a small box getting nothing
checkHits(q(queryCenter, radiusDeg - smallRadius/2), 0, null);
//4.2 Query a large box enclosing the cluster, getting everything
checkHits(q(queryCenter, radiusDeg*3*1.01), points.size(), null);
//4.3 Query a medium box getting some (calculate the correct solution and verify)
double queryDist = radiusDeg * 2;
//Find matching points. Put into int[] of doc ids which is the same thing as the index into points list.
int[] ids = new int[points.size()];
int ids_sz = 0;
for (int i = 0; i < points.size(); i++) {
Point point = points.get(i);
if (ctx.getDistCalc().distance(queryCenter, point) <= queryDist)
ids[ids_sz++] = i;
}
ids = Arrays.copyOf(ids, ids_sz);
//assert ids_sz > 0 (can't because randomness keeps us from being able to)
checkHits(q(queryCenter, queryDist), ids.length, ids);
}
}//for radiusDeg
}//for clusterCenter
}//randomTest()
| public void geohashRecursiveRandom() throws IOException {
init(12);
//1. Iterate test with the cluster at some worldly point of interest
Point[] clusterCenters = new Point[]{ctx.makePoint(-180,0), ctx.makePoint(0,90), ctx.makePoint(0,-90)};
for (Point clusterCenter : clusterCenters) {
//2. Iterate on size of cluster (a really small one and a large one)
String hashCenter = GeohashUtils.encodeLatLon(clusterCenter.getY(), clusterCenter.getX(), maxLength);
//calculate the number of degrees in the smallest grid box size (use for both lat & lon)
String smallBox = hashCenter.substring(0,hashCenter.length()-1);//chop off leaf precision
Rectangle clusterDims = GeohashUtils.decodeBoundary(smallBox,ctx);
double smallRadius = Math.max(clusterDims.getMaxX()-clusterDims.getMinX(),clusterDims.getMaxY()-clusterDims.getMinY());
assert smallRadius < 1;
double largeRadius = 20d;//good large size; don't use >=45 for this test code to work
double[] radiusDegs = {largeRadius,smallRadius};
for (double radiusDeg : radiusDegs) {
//3. Index random points in this cluster circle
deleteAll();
List<Point> points = new ArrayList<Point>();
for(int i = 0; i < 20; i++) {
//Note that this will not result in randomly distributed points in the
// circle, they will be concentrated towards the center a little. But
// it's good enough.
Point pt = ctx.getDistCalc().pointOnBearing(clusterCenter,
random().nextDouble() * radiusDeg, random().nextInt() * 360, ctx, null);
pt = alignGeohash(pt);
points.add(pt);
addDocument(newDoc("" + i, pt));
}
commit();
//3. Use some query centers. Each is twice the cluster's radius away.
for(int ri = 0; ri < 4; ri++) {
Point queryCenter = ctx.getDistCalc().pointOnBearing(clusterCenter,
radiusDeg*2, random().nextInt(360), ctx, null);
queryCenter = alignGeohash(queryCenter);
//4.1 Query a small box getting nothing
checkHits(q(queryCenter, radiusDeg - smallRadius/2), 0, null);
//4.2 Query a large box enclosing the cluster, getting everything
checkHits(q(queryCenter, radiusDeg*3 + smallRadius/2), points.size(), null);
//4.3 Query a medium box getting some (calculate the correct solution and verify)
double queryDist = radiusDeg * 2;
//Find matching points. Put into int[] of doc ids which is the same thing as the index into points list.
int[] ids = new int[points.size()];
int ids_sz = 0;
for (int i = 0; i < points.size(); i++) {
Point point = points.get(i);
if (ctx.getDistCalc().distance(queryCenter, point) <= queryDist)
ids[ids_sz++] = i;
}
ids = Arrays.copyOf(ids, ids_sz);
//assert ids_sz > 0 (can't because randomness keeps us from being able to)
checkHits(q(queryCenter, queryDist), ids.length, ids);
}
}//for radiusDeg
}//for clusterCenter
}//randomTest()
|
diff --git a/src/test/java/nl/finalist/datomic/intro/Tests.java b/src/test/java/nl/finalist/datomic/intro/Tests.java
index eea3a38..7bd5c32 100644
--- a/src/test/java/nl/finalist/datomic/intro/Tests.java
+++ b/src/test/java/nl/finalist/datomic/intro/Tests.java
@@ -1,126 +1,126 @@
package nl.finalist.datomic.intro;
import static org.junit.Assert.assertEquals;
import java.util.Collection;
import java.util.Date;
import java.util.List;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import datomic.Connection;
import datomic.Entity;
import datomic.Peer;
public class Tests
{
private static final Logger LOGGER = LoggerFactory.getLogger( Tests.class );
@Test
public void parseDatomicFileAndRunTransaction()
{
final String uri = "datomic:mem://players";
LOGGER.info( "Creating and connecting to database at {}", uri );
Connection conn = Main.createAndConnect( uri );
// Adding schema and data (1)
LOGGER.info( "Adding schema and data with attrs: name, country, person/born, person/height, player/position" );
Main.parseDatomicFileAndRunTransaction( "data/schema-1.dtm", conn );
Main.parseDatomicFileAndRunTransaction( "data/data-1.dtm", conn );
// Exercise 1
LOGGER.info( "Find all entities" );
// Task: define the query
- String query = "";
+ String query = "[:find ?p :in $ :where [?p :name]]";
Collection<List<Object>> results = Peer.q( query, conn.db() );
assertEquals( 153, results.size() );
List<Entity> entities = Helper.entities( results, conn.db() );
Helper.printEntities( entities );
// Exercise 2
LOGGER.info( "Find all persons" );
// Task: define the query
- query = "";
+ query = "[:find ?p :in $ :where [?p :person/height _]]";
results = Peer.q( query, conn.db() );
assertEquals( 85, results.size() );
Helper.printEntities( Helper.entities( results, conn.db() ) );
// Adding schema and data (2)
LOGGER.info( "Adding attributes to schema: player/team, player/salary + data" );
Main.parseDatomicFileAndRunTransaction( "data/schema-2.dtm", conn );
Main.loadPlayerTeamAndSalary( "data/data-2-2011.csv", conn );
// Exercise 3
LOGGER.info( "Find team and salary for Zlatan Ibrahimovic" );
// Task: define the query
query = "";
results = Peer.q( query, conn.db(), "Zlatan Ibrahimovic" );
List<Object> tuple = results.iterator().next();
assertEquals( 2, tuple.size() );
assertEquals( "AC Milan", tuple.get( 0 ) );
assertEquals( 9.0, tuple.get( 1 ) );
LOGGER.info( "Added data for Zlatan Ibrahimovic, team: {}, salary: {}", tuple.toArray() );
// Find instant for Zlatans salary addition
LOGGER.info( "Find instant when Zlatans salary was recorded" );
query = "[:find ?instant :in $ ?n :where [?p :name ?n] [?tx :db/txInstant ?instant]]";
results = Peer.q( query, conn.db(), "Zlatan Ibrahimovic" );
Date year2011 = (Date)results.iterator().next().get( 0 );
LOGGER.info( "Salary data for Zlatan added on {}", year2011 );
// Add data for 2012
LOGGER.info( "Loading player team and salary data for 2012" );
Main.loadPlayerTeamAndSalary( "data/data-2-2012.csv", conn );
// Exercise 4
LOGGER.info( "List name, team and salary, ordered by salary (desc) for 2011" );
query = "";
// Task: change the argument
results = Peer.q( query, conn.db() );
List<List<Object>> values = Helper.sort( Helper.list( results ), 1, "DESC" );
Helper.printValues( values );
assertEquals( "Cristiano Ronaldo", values.get( 0 ).get( 0 ) );
// Add schema and data (3)
LOGGER.info( "Adding Twitter user attributes to schema + data" );
Main.parseDatomicFileAndRunTransaction( "data/schema-3.dtm", conn );
Main.parseDatomicFileAndRunTransaction( "data/data-3.dtm", conn );
// Exercise 5
LOGGER.info( "Find Twitter screenName and followersCount where followersCount > a million" );
// Task: define the query
query = "";
results = Peer.q( query, conn.db() );
assertEquals( 21, results.size() );
values = Helper.sort( Helper.list( results ), 1, "DESC" );
Helper.printValues( values );
// Add schema and data (4)
LOGGER.info( "Adding attributes to schema: player/twitter.screenName + data" );
Main.parseDatomicFileAndRunTransaction( "data/schema-4.dtm", conn );
Main.loadPlayerTwitterScreenName( "data/data-4.csv", conn );
// Exercise 6
LOGGER.info( "Find names of players who are following Robin van Persie on Twitter" );
// Task: define the query
query = "";
results = Peer.q( query, conn.db(), "Robin van Persie" );
assertEquals( 23, results.size() );
Helper.printValues( Helper.list( results ) );
}
}
| false | true | public void parseDatomicFileAndRunTransaction()
{
final String uri = "datomic:mem://players";
LOGGER.info( "Creating and connecting to database at {}", uri );
Connection conn = Main.createAndConnect( uri );
// Adding schema and data (1)
LOGGER.info( "Adding schema and data with attrs: name, country, person/born, person/height, player/position" );
Main.parseDatomicFileAndRunTransaction( "data/schema-1.dtm", conn );
Main.parseDatomicFileAndRunTransaction( "data/data-1.dtm", conn );
// Exercise 1
LOGGER.info( "Find all entities" );
// Task: define the query
String query = "";
Collection<List<Object>> results = Peer.q( query, conn.db() );
assertEquals( 153, results.size() );
List<Entity> entities = Helper.entities( results, conn.db() );
Helper.printEntities( entities );
// Exercise 2
LOGGER.info( "Find all persons" );
// Task: define the query
query = "";
results = Peer.q( query, conn.db() );
assertEquals( 85, results.size() );
Helper.printEntities( Helper.entities( results, conn.db() ) );
// Adding schema and data (2)
LOGGER.info( "Adding attributes to schema: player/team, player/salary + data" );
Main.parseDatomicFileAndRunTransaction( "data/schema-2.dtm", conn );
Main.loadPlayerTeamAndSalary( "data/data-2-2011.csv", conn );
// Exercise 3
LOGGER.info( "Find team and salary for Zlatan Ibrahimovic" );
// Task: define the query
query = "";
results = Peer.q( query, conn.db(), "Zlatan Ibrahimovic" );
List<Object> tuple = results.iterator().next();
assertEquals( 2, tuple.size() );
assertEquals( "AC Milan", tuple.get( 0 ) );
assertEquals( 9.0, tuple.get( 1 ) );
LOGGER.info( "Added data for Zlatan Ibrahimovic, team: {}, salary: {}", tuple.toArray() );
// Find instant for Zlatans salary addition
LOGGER.info( "Find instant when Zlatans salary was recorded" );
query = "[:find ?instant :in $ ?n :where [?p :name ?n] [?tx :db/txInstant ?instant]]";
results = Peer.q( query, conn.db(), "Zlatan Ibrahimovic" );
Date year2011 = (Date)results.iterator().next().get( 0 );
LOGGER.info( "Salary data for Zlatan added on {}", year2011 );
// Add data for 2012
LOGGER.info( "Loading player team and salary data for 2012" );
Main.loadPlayerTeamAndSalary( "data/data-2-2012.csv", conn );
// Exercise 4
LOGGER.info( "List name, team and salary, ordered by salary (desc) for 2011" );
query = "";
// Task: change the argument
results = Peer.q( query, conn.db() );
List<List<Object>> values = Helper.sort( Helper.list( results ), 1, "DESC" );
Helper.printValues( values );
assertEquals( "Cristiano Ronaldo", values.get( 0 ).get( 0 ) );
// Add schema and data (3)
LOGGER.info( "Adding Twitter user attributes to schema + data" );
Main.parseDatomicFileAndRunTransaction( "data/schema-3.dtm", conn );
Main.parseDatomicFileAndRunTransaction( "data/data-3.dtm", conn );
// Exercise 5
LOGGER.info( "Find Twitter screenName and followersCount where followersCount > a million" );
// Task: define the query
query = "";
results = Peer.q( query, conn.db() );
assertEquals( 21, results.size() );
values = Helper.sort( Helper.list( results ), 1, "DESC" );
Helper.printValues( values );
// Add schema and data (4)
LOGGER.info( "Adding attributes to schema: player/twitter.screenName + data" );
Main.parseDatomicFileAndRunTransaction( "data/schema-4.dtm", conn );
Main.loadPlayerTwitterScreenName( "data/data-4.csv", conn );
// Exercise 6
LOGGER.info( "Find names of players who are following Robin van Persie on Twitter" );
// Task: define the query
query = "";
results = Peer.q( query, conn.db(), "Robin van Persie" );
assertEquals( 23, results.size() );
Helper.printValues( Helper.list( results ) );
}
| public void parseDatomicFileAndRunTransaction()
{
final String uri = "datomic:mem://players";
LOGGER.info( "Creating and connecting to database at {}", uri );
Connection conn = Main.createAndConnect( uri );
// Adding schema and data (1)
LOGGER.info( "Adding schema and data with attrs: name, country, person/born, person/height, player/position" );
Main.parseDatomicFileAndRunTransaction( "data/schema-1.dtm", conn );
Main.parseDatomicFileAndRunTransaction( "data/data-1.dtm", conn );
// Exercise 1
LOGGER.info( "Find all entities" );
// Task: define the query
String query = "[:find ?p :in $ :where [?p :name]]";
Collection<List<Object>> results = Peer.q( query, conn.db() );
assertEquals( 153, results.size() );
List<Entity> entities = Helper.entities( results, conn.db() );
Helper.printEntities( entities );
// Exercise 2
LOGGER.info( "Find all persons" );
// Task: define the query
query = "[:find ?p :in $ :where [?p :person/height _]]";
results = Peer.q( query, conn.db() );
assertEquals( 85, results.size() );
Helper.printEntities( Helper.entities( results, conn.db() ) );
// Adding schema and data (2)
LOGGER.info( "Adding attributes to schema: player/team, player/salary + data" );
Main.parseDatomicFileAndRunTransaction( "data/schema-2.dtm", conn );
Main.loadPlayerTeamAndSalary( "data/data-2-2011.csv", conn );
// Exercise 3
LOGGER.info( "Find team and salary for Zlatan Ibrahimovic" );
// Task: define the query
query = "";
results = Peer.q( query, conn.db(), "Zlatan Ibrahimovic" );
List<Object> tuple = results.iterator().next();
assertEquals( 2, tuple.size() );
assertEquals( "AC Milan", tuple.get( 0 ) );
assertEquals( 9.0, tuple.get( 1 ) );
LOGGER.info( "Added data for Zlatan Ibrahimovic, team: {}, salary: {}", tuple.toArray() );
// Find instant for Zlatans salary addition
LOGGER.info( "Find instant when Zlatans salary was recorded" );
query = "[:find ?instant :in $ ?n :where [?p :name ?n] [?tx :db/txInstant ?instant]]";
results = Peer.q( query, conn.db(), "Zlatan Ibrahimovic" );
Date year2011 = (Date)results.iterator().next().get( 0 );
LOGGER.info( "Salary data for Zlatan added on {}", year2011 );
// Add data for 2012
LOGGER.info( "Loading player team and salary data for 2012" );
Main.loadPlayerTeamAndSalary( "data/data-2-2012.csv", conn );
// Exercise 4
LOGGER.info( "List name, team and salary, ordered by salary (desc) for 2011" );
query = "";
// Task: change the argument
results = Peer.q( query, conn.db() );
List<List<Object>> values = Helper.sort( Helper.list( results ), 1, "DESC" );
Helper.printValues( values );
assertEquals( "Cristiano Ronaldo", values.get( 0 ).get( 0 ) );
// Add schema and data (3)
LOGGER.info( "Adding Twitter user attributes to schema + data" );
Main.parseDatomicFileAndRunTransaction( "data/schema-3.dtm", conn );
Main.parseDatomicFileAndRunTransaction( "data/data-3.dtm", conn );
// Exercise 5
LOGGER.info( "Find Twitter screenName and followersCount where followersCount > a million" );
// Task: define the query
query = "";
results = Peer.q( query, conn.db() );
assertEquals( 21, results.size() );
values = Helper.sort( Helper.list( results ), 1, "DESC" );
Helper.printValues( values );
// Add schema and data (4)
LOGGER.info( "Adding attributes to schema: player/twitter.screenName + data" );
Main.parseDatomicFileAndRunTransaction( "data/schema-4.dtm", conn );
Main.loadPlayerTwitterScreenName( "data/data-4.csv", conn );
// Exercise 6
LOGGER.info( "Find names of players who are following Robin van Persie on Twitter" );
// Task: define the query
query = "";
results = Peer.q( query, conn.db(), "Robin van Persie" );
assertEquals( 23, results.size() );
Helper.printValues( Helper.list( results ) );
}
|
diff --git a/src/main/java/com/censoredsoftware/Demigods/Engine/Runnable/BattleRunnable.java b/src/main/java/com/censoredsoftware/Demigods/Engine/Runnable/BattleRunnable.java
index 7d19bb38..dc1e4bb1 100644
--- a/src/main/java/com/censoredsoftware/Demigods/Engine/Runnable/BattleRunnable.java
+++ b/src/main/java/com/censoredsoftware/Demigods/Engine/Runnable/BattleRunnable.java
@@ -1,27 +1,28 @@
package com.censoredsoftware.Demigods.Engine.Runnable;
import org.bukkit.Bukkit;
import org.bukkit.scheduler.BukkitRunnable;
import com.censoredsoftware.Demigods.Engine.Object.Battle.Battle;
public class BattleRunnable extends BukkitRunnable
{
@Override
public void run()
{
// Battle onTick logic
for(Battle battle : Battle.getAllActive())
{
if(battle.getMeta().getKills() > battle.getMaxKills() || battle.getStartTime() + battle.getDuration() <= System.currentTimeMillis() && battle.getMeta().getKills() > battle.getMinKills()) battle.end();
else Battle.battleBorder(battle);
}
// Delete old battles
- if(Battle.battleQueue.size() >= (int) Math.ceil(Bukkit.getOnlinePlayers().length / 2.0))
+ int limit = (int) Math.ceil(Bukkit.getOnlinePlayers().length / 2.0);
+ if(Battle.battleQueue.size() >= (limit < 3 ? 3 : limit))
{
Battle delete = Battle.battleQueue.poll();
if(delete != null) delete.delete();
}
}
}
| true | true | public void run()
{
// Battle onTick logic
for(Battle battle : Battle.getAllActive())
{
if(battle.getMeta().getKills() > battle.getMaxKills() || battle.getStartTime() + battle.getDuration() <= System.currentTimeMillis() && battle.getMeta().getKills() > battle.getMinKills()) battle.end();
else Battle.battleBorder(battle);
}
// Delete old battles
if(Battle.battleQueue.size() >= (int) Math.ceil(Bukkit.getOnlinePlayers().length / 2.0))
{
Battle delete = Battle.battleQueue.poll();
if(delete != null) delete.delete();
}
}
| public void run()
{
// Battle onTick logic
for(Battle battle : Battle.getAllActive())
{
if(battle.getMeta().getKills() > battle.getMaxKills() || battle.getStartTime() + battle.getDuration() <= System.currentTimeMillis() && battle.getMeta().getKills() > battle.getMinKills()) battle.end();
else Battle.battleBorder(battle);
}
// Delete old battles
int limit = (int) Math.ceil(Bukkit.getOnlinePlayers().length / 2.0);
if(Battle.battleQueue.size() >= (limit < 3 ? 3 : limit))
{
Battle delete = Battle.battleQueue.poll();
if(delete != null) delete.delete();
}
}
|
diff --git a/shanks-core/src/main/java/es/upm/dit/gsi/shanks/model/scenario/Scenario.java b/shanks-core/src/main/java/es/upm/dit/gsi/shanks/model/scenario/Scenario.java
index 85f52557..6dda7a6f 100644
--- a/shanks-core/src/main/java/es/upm/dit/gsi/shanks/model/scenario/Scenario.java
+++ b/shanks-core/src/main/java/es/upm/dit/gsi/shanks/model/scenario/Scenario.java
@@ -1,517 +1,521 @@
package es.upm.dit.gsi.shanks.model.scenario;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Properties;
import java.util.Set;
import java.util.logging.Logger;
import ec.util.MersenneTwisterFast;
import es.upm.dit.gsi.shanks.model.element.NetworkElement;
import es.upm.dit.gsi.shanks.model.element.exception.TooManyConnectionException;
import es.upm.dit.gsi.shanks.model.element.exception.UnsupportedNetworkElementStatusException;
import es.upm.dit.gsi.shanks.model.failure.Failure;
import es.upm.dit.gsi.shanks.model.failure.exception.NoCombinationForFailureException;
import es.upm.dit.gsi.shanks.model.failure.exception.UnsupportedElementInFailureException;
import es.upm.dit.gsi.shanks.model.scenario.exception.DuplicatedIDException;
import es.upm.dit.gsi.shanks.model.scenario.exception.ScenarioNotFoundException;
import es.upm.dit.gsi.shanks.model.scenario.exception.UnsupportedScenarioStatusException;
import es.upm.dit.gsi.shanks.model.scenario.portrayal.Scenario2DPortrayal;
import es.upm.dit.gsi.shanks.model.scenario.portrayal.Scenario3DPortrayal;
import es.upm.dit.gsi.shanks.model.scenario.portrayal.ScenarioPortrayal;
import es.upm.dit.gsi.shanks.model.scenario.portrayal.exception.DuplicatedPortrayalIDException;
/**
* Scenarios class
*
* This class create the different scenarios
*
* @author Daniel Lara
* @author a.carrera
* @version 0.1.1
*
*/
/**
* @author a.carrera
*
*/
public abstract class Scenario {
private Logger logger = Logger.getLogger(Scenario.class.getName());
public static final String SIMULATION_GUI = "SIMULATION GUI";
public static final String SIMULATION_2D = "2D";
public static final String SIMULATION_3D = "3D";
public static final String NO_GUI = "NO GUI";
private String id;
private Properties properties;
private List<String> possibleStates;
private String currentStatus;
private HashMap<String, NetworkElement> currentElements;
private HashMap<Failure, Integer> currentFailures;
private HashMap<Class<? extends Failure>, List<Set<NetworkElement>>> possibleFailures;
private HashMap<Class<? extends Failure>, List<Integer>> generatedFailureConfigurations;
/**
* Constructor of scenario
*
* @param id
* @param initialState
* @param properties
* @throws UnsupportedNetworkElementStatusException
* @throws TooManyConnectionException
* @throws UnsupportedScenarioStatusException
* @throws DuplicatedIDException
*/
public Scenario(String id, String initialState, Properties properties)
throws UnsupportedNetworkElementStatusException,
TooManyConnectionException, UnsupportedScenarioStatusException,
DuplicatedIDException {
this.id = id;
this.setProperties(properties);
this.possibleStates = new ArrayList<String>();
this.currentElements = new HashMap<String, NetworkElement>();
this.currentFailures = new HashMap<Failure, Integer>();
this.possibleFailures = new HashMap<Class<? extends Failure>, List<Set<NetworkElement>>>();
this.generatedFailureConfigurations = new HashMap<Class<? extends Failure>, List<Integer>>();
this.setPossibleStates();
this.addNetworkElements();
this.addPossibleFailures();
this.setCurrentStatus(initialState);
logger.info("Scenario " + this.getID() + " successfully created.");
}
/**
* Create the scenario portrayal (2D o 3D).
*
* @return Scenario2DPortrayal or Scenario3DPortrayal object
* @throws DuplicatedPortrayalIDException
* @throws ScenarioNotFoundException
*/
public ScenarioPortrayal createScenarioPortrayal() throws DuplicatedPortrayalIDException, ScenarioNotFoundException {
logger.fine("Creating Scenario Portrayal...");
String dimensions = (String) this.getProperty(Scenario.SIMULATION_GUI);
if (dimensions.equals(Scenario.SIMULATION_2D)) {
logger.fine("Creating Scenario2DPortrayal");
return this.createScenario2DPortrayal();
} else if (dimensions.equals(Scenario.SIMULATION_3D)){
logger.fine("Creating Scenario3DPortrayal");
return this.createScenario3DPortrayal();
} else if (dimensions.equals(Scenario.NO_GUI)) {
return null;
}
return null;
}
/**
* @return a Scenario3DPortrayal
* @throws DuplicatedPortrayalIDException
* @throws ScenarioNotFoundException
*/
abstract public Scenario2DPortrayal createScenario2DPortrayal() throws DuplicatedPortrayalIDException, ScenarioNotFoundException;
/**
* @return a Scenario2DPortrayal
* @throws DuplicatedPortrayalIDException
* @throws ScenarioNotFoundException
*/
abstract public Scenario3DPortrayal createScenario3DPortrayal() throws DuplicatedPortrayalIDException, ScenarioNotFoundException;
/**
* @return the id
*/
public String getID() {
return id;
}
/**
* @return the currentStatus
*/
public String getCurrentStatus() {
return currentStatus;
}
/**
* @param desiredStatus
* the currentStatus to set
* @return true if the status was set correctly and false if the status is
* not a possible status of the network element
* @throws UnsupportedNetworkElementStatusException
*/
public boolean setCurrentStatus(String desiredStatus)
throws UnsupportedScenarioStatusException {
if (this.isPossibleStatus(desiredStatus)) {
this.currentStatus = desiredStatus;
return true;
} else {
logger.warning("Impossible to set status: " + desiredStatus
+ ". This network element " + this.getID()
+ "does not support this status.");
throw new UnsupportedScenarioStatusException();
}
}
/**
* @param possibleStatus
* @return
*/
private boolean isPossibleStatus(String possibleStatus) {
if (this.possibleStates.contains(possibleStatus)) {
return true;
} else {
return false;
}
}
/**
* @param possibleStatus
*/
public void addPossibleStatus(String possibleStatus) {
if (!this.possibleStates.contains(possibleStatus)) {
this.possibleStates.add(possibleStatus);
logger.fine("Status: " + possibleStatus
+ " was added as possible status to Scenario "
+ this.getID());
} else {
logger.fine("Status: " + possibleStatus
+ " was already added as possible status to Scenario "
+ this.getID());
}
}
/**
* @param possibleStatus
*/
public void removePossibleStatus(String possibleStatus) {
if (this.possibleStates.contains(possibleStatus)) {
this.possibleStates.remove(possibleStatus);
logger.fine("Status: " + possibleStatus
+ " was removed as possible status to Scenario "
+ this.getID());
} else {
logger.fine("Status: " + possibleStatus
+ " was not removed as possible status to Scenario "
+ this.getID()
+ " because it was not a possible status previously.");
}
}
/**
* @param element
* @throws DuplicatedIDException
*/
public void addNetworkElement(NetworkElement element)
throws DuplicatedIDException {
if (!this.currentElements.containsKey(element.getID())) {
this.currentElements.put(element.getID(), element);
} else {
throw new DuplicatedIDException(element);
}
}
/**
* @param element
*/
public void removeNetworkElement(NetworkElement element) {
this.currentElements.remove(element.getID());
}
/**
* @return Map with key: NetworkElementID and value: NetworkElement
*/
public HashMap<String, NetworkElement> getCurrentElements() {
return this.currentElements;
}
/**
* @return the properties
*/
public Properties getProperties() {
return properties;
}
/**
* @param properties the properties to set
*/
public void setProperties(Properties properties) {
this.properties = properties;
}
/**
* @param propertyKey
* @return the property value
*/
public Object getProperty(String propertyKey) {
return this.properties.getProperty(propertyKey);
}
/**
* @param propertyKey
* @param propertyValue
*/
public void addProperty(String propertyKey, String propertyValue) {
this.properties.put(propertyKey, propertyValue);
}
/**
* @param propertyKey
*/
public void removePorperty(String propertyKey) {
this.properties.remove(propertyKey);
}
/**
*
*
* @param failure
* Failure to add
* @param configuration
* Configuration of the failure
*/
public void addFailure(Failure failure, int configuration) {
if (this.possibleFailures.containsKey(failure.getClass())) {
this.currentFailures.put(failure, configuration);
} else {
logger.warning("Failure was not added, because this scenario does not support this type of Failure. Failure type: "
+ failure.getClass().getName());
}
}
/**
* @param failure
*/
public void removeFailure(Failure failure) {
this.currentFailures.remove(failure);
}
/**
* @return set of current active failures in the scenario
*/
public Set<Failure> getCurrentFailures() {
return this.currentFailures.keySet();
}
/**
* @return Map with key: Failure object and value: Combination Number
*/
protected HashMap<Failure,Integer> getFullCurrentFailures() {
return this.currentFailures;
}
/**
* @param failure
* @param possibleCombinations
*/
public void addPossibleFailure(Class<? extends Failure> failure,
List<Set<NetworkElement>> possibleCombinations) {
this.possibleFailures.put(failure, possibleCombinations);
}
/**
* @param failure
* @param set
*/
public void addPossibleFailure(Class<? extends Failure> failure,
Set<NetworkElement> set) {
List<Set<NetworkElement>> list = new ArrayList<Set<NetworkElement>>();
list.add(set);
this.possibleFailures.put(failure, list);
}
/**
* @param failure
* @param element
*/
public void addPossibleFailure(Class<? extends Failure> failure,
NetworkElement element) {
List<Set<NetworkElement>> list = new ArrayList<Set<NetworkElement>>();
Set<NetworkElement> set = new HashSet<NetworkElement>();
set.add(element);
list.add(set);
this.possibleFailures.put(failure, list);
}
/**
* @param failureType
*/
public void removePossibleFailure(Class<Failure> failureType) {
this.possibleFailures.remove(failureType);
}
/**
* @return Map with key: Concrete Failure Class and value: List of combinations of the failure
*/
public HashMap<Class<? extends Failure>, List<Set<NetworkElement>>> getPossibleFailures() {
return this.possibleFailures;
}
/**
*
*/
abstract public void setPossibleStates();
/**
* @throws UnsupportedNetworkElementStatusException
* @throws TooManyConnectionException
* @throws DuplicatedIDException
*
*/
abstract public void addNetworkElements()
throws UnsupportedNetworkElementStatusException,
TooManyConnectionException, DuplicatedIDException;
/**
*
*/
abstract public void addPossibleFailures();
/**
*
* Algorithm used to generate failures during the simulation
*
* @throws UnsupportedScenarioStatusException
* @throws NoCombinationForFailureException
* @throws UnsupportedElementInFailureException
* @throws IllegalAccessException
* @throws InstantiationException
*
*/
public void generateFailures() throws UnsupportedScenarioStatusException,
NoCombinationForFailureException,
UnsupportedElementInFailureException, InstantiationException,
IllegalAccessException {
MersenneTwisterFast randomizer = new MersenneTwisterFast();
String status = this.getCurrentStatus();
HashMap<Class<? extends Failure>, Double> penalties = this
.getPenaltiesInStatus(status);
Iterator<Class<? extends Failure>> it = this.getPossibleFailures()
.keySet().iterator();
while (it.hasNext()) {
Class<? extends Failure> type = it.next();
double penalty = 0;
double prob = 0;
try {
Failure failure = type.newInstance();
List<Set<NetworkElement>> list = this.getPossibleFailures()
.get(type);
int numberOfCombinations = list.size();
int combinationNumber = randomizer.nextInt(numberOfCombinations);
try {
- // Apply penalty
- penalty = penalties.get(type);
- if (penalty > 0) {
- prob = failure.getOccurrenceProbability()
- * numberOfCombinations * penalty;
+ if (penalties.containsKey(type)) {
+ // Apply penalty
+ penalty = penalties.get(type);
+ if (penalty > 0) {
+ prob = failure.getOccurrenceProbability()
+ * numberOfCombinations * penalty;
+ } else {
+ prob = -1.0; // Impossible failure
+ }
} else {
- prob = -1.0; // Impossible failure
+ prob = failure.getOccurrenceProbability()*numberOfCombinations;
}
} catch (Exception e) {
logger.fine("There is no penalty for failures: "
+ type.getName() + " in status " + status);
}
// double aux = randomizer.nextDouble(); // THIS OPTION GENERATE MANY FAULTS OF THE SAME TYPE AT THE SAME TIME
double aux = Math.random(); // THIS WORKS BETTER, MORE RANDOMLY
if (aux < prob) {
// Generate failure
Set<NetworkElement> elementsSet;
if (numberOfCombinations >= 1) {
elementsSet = list.get(combinationNumber);
this.setupFailure(failure, elementsSet,
combinationNumber);
} else if (this.generatedFailureConfigurations.get(type).size() == 0) {
throw new NoCombinationForFailureException(failure);
}
}
} catch (NoCombinationForFailureException e) {
throw e;
} catch (UnsupportedElementInFailureException e) {
logger.severe("Impossible to instance failure: "
+ type.getName()
+ ". All failures must have a default constructor that calls other constructor Failure(String id, double occurrenceProbability)");
logger.severe("Exception: " + e.getMessage());
throw e;
}
}
}
/**
* @param failure
* @param elementsSet
* @throws UnsupportedElementInFailureException
*/
private void setupFailure(Failure failure, Set<NetworkElement> elementsSet,
int configurationNumber)
throws UnsupportedElementInFailureException {
for (NetworkElement element : elementsSet) {
String statusToSet = failure.getPossibleAffectedElements().get(
element.getClass());
failure.addAffectedElement(element, statusToSet);
}
if (!this.generatedFailureConfigurations.containsKey(failure.getClass())) {
this.generatedFailureConfigurations.put(failure.getClass(),
new ArrayList<Integer>());
}
List<Integer> numList = this.generatedFailureConfigurations.get(failure.getClass());
if (!numList.contains(configurationNumber)) {
numList.add(configurationNumber);
this.generatedFailureConfigurations.put(failure.getClass(), numList);
failure.activateFailure();
this.addFailure(failure, configurationNumber);
logger.fine("Generated Failure " + failure.getID() + " with configuration " + configurationNumber);
}
}
/**
* This method can return multipliers >1.0 (more probable failures) or
* <1.0(less probable failure). CAUTION: If the multiplier is <=0.0, the
* failure never happends.
*
* @param status
* @return Multiplier for each type of failure
* @throws UnsupportedScenarioStatusException
*/
abstract public HashMap<Class<? extends Failure>, Double> getPenaltiesInStatus(
String status) throws UnsupportedScenarioStatusException;
/**
 * Removes every active failure that reports itself as resolved, releasing
 * its configuration number so the same combination may be generated again.
 *
 * @return the failures that were resolved and removed from the active set
 */
public List<Failure> checkResolvedFailures() {
    List<Failure> resolvedFailures = new ArrayList<Failure>();
    for (Failure failure : this.currentFailures.keySet()) {
        if (failure.isResolved()) {
            resolvedFailures.add(failure);
            List<Integer> numList = this.generatedFailureConfigurations.get(failure
                    .getClass());
            Integer conf = this.currentFailures.get(failure);
            // Reuse the value fetched above instead of a second map lookup;
            // a boxed Integer selects List.remove(Object), not remove(int).
            // numList is the live list in the map, so no re-put is needed.
            numList.remove(conf);
            logger.fine("Resolved failure " + failure.getID() + ". Failure class: " + failure.getClass().getName() + " with configuration " + conf);
        }
    }
    // Remove after iterating to avoid ConcurrentModificationException on
    // currentFailures' key set.
    for (Failure resolved : resolvedFailures) {
        this.currentFailures.remove(resolved);
    }
    return resolvedFailures;
}
/**
 * Looks up a network element by its identifier.
 *
 * @param id identifier of the element
 * @return the matching NetworkElement, or null if none is registered
 */
public NetworkElement getNetworkElement(String id) {
    NetworkElement element = this.currentElements.get(id);
    return element;
}
}
| false | true | public void generateFailures() throws UnsupportedScenarioStatusException,
NoCombinationForFailureException,
UnsupportedElementInFailureException, InstantiationException,
IllegalAccessException {
MersenneTwisterFast randomizer = new MersenneTwisterFast();
String status = this.getCurrentStatus();
HashMap<Class<? extends Failure>, Double> penalties = this
.getPenaltiesInStatus(status);
Iterator<Class<? extends Failure>> it = this.getPossibleFailures()
.keySet().iterator();
while (it.hasNext()) {
Class<? extends Failure> type = it.next();
double penalty = 0;
double prob = 0;
try {
Failure failure = type.newInstance();
List<Set<NetworkElement>> list = this.getPossibleFailures()
.get(type);
int numberOfCombinations = list.size();
int combinationNumber = randomizer.nextInt(numberOfCombinations);
try {
// Apply penalty
penalty = penalties.get(type);
if (penalty > 0) {
prob = failure.getOccurrenceProbability()
* numberOfCombinations * penalty;
} else {
prob = -1.0; // Impossible failure
}
} catch (Exception e) {
logger.fine("There is no penalty for failures: "
+ type.getName() + " in status " + status);
}
// double aux = randomizer.nextDouble(); // THIS OPTION GENERATE MANY FAULTS OF THE SAME TYPE AT THE SAME TIME
double aux = Math.random(); // THIS WORKS BETTER, MORE RANDOMLY
if (aux < prob) {
// Generate failure
Set<NetworkElement> elementsSet;
if (numberOfCombinations >= 1) {
elementsSet = list.get(combinationNumber);
this.setupFailure(failure, elementsSet,
combinationNumber);
} else if (this.generatedFailureConfigurations.get(type).size() == 0) {
throw new NoCombinationForFailureException(failure);
}
}
} catch (NoCombinationForFailureException e) {
throw e;
} catch (UnsupportedElementInFailureException e) {
logger.severe("Impossible to instance failure: "
+ type.getName()
+ ". All failures must have a default constructor that calls other constructor Failure(String id, double occurrenceProbability)");
logger.severe("Exception: " + e.getMessage());
throw e;
}
}
}
| public void generateFailures() throws UnsupportedScenarioStatusException,
NoCombinationForFailureException,
UnsupportedElementInFailureException, InstantiationException,
IllegalAccessException {
MersenneTwisterFast randomizer = new MersenneTwisterFast();
String status = this.getCurrentStatus();
HashMap<Class<? extends Failure>, Double> penalties = this
.getPenaltiesInStatus(status);
Iterator<Class<? extends Failure>> it = this.getPossibleFailures()
.keySet().iterator();
while (it.hasNext()) {
Class<? extends Failure> type = it.next();
double penalty = 0;
double prob = 0;
try {
Failure failure = type.newInstance();
List<Set<NetworkElement>> list = this.getPossibleFailures()
.get(type);
int numberOfCombinations = list.size();
int combinationNumber = randomizer.nextInt(numberOfCombinations);
try {
if (penalties.containsKey(type)) {
// Apply penalty
penalty = penalties.get(type);
if (penalty > 0) {
prob = failure.getOccurrenceProbability()
* numberOfCombinations * penalty;
} else {
prob = -1.0; // Impossible failure
}
} else {
prob = failure.getOccurrenceProbability()*numberOfCombinations;
}
} catch (Exception e) {
logger.fine("There is no penalty for failures: "
+ type.getName() + " in status " + status);
}
// double aux = randomizer.nextDouble(); // THIS OPTION GENERATE MANY FAULTS OF THE SAME TYPE AT THE SAME TIME
double aux = Math.random(); // THIS WORKS BETTER, MORE RANDOMLY
if (aux < prob) {
// Generate failure
Set<NetworkElement> elementsSet;
if (numberOfCombinations >= 1) {
elementsSet = list.get(combinationNumber);
this.setupFailure(failure, elementsSet,
combinationNumber);
} else if (this.generatedFailureConfigurations.get(type).size() == 0) {
throw new NoCombinationForFailureException(failure);
}
}
} catch (NoCombinationForFailureException e) {
throw e;
} catch (UnsupportedElementInFailureException e) {
logger.severe("Impossible to instance failure: "
+ type.getName()
+ ". All failures must have a default constructor that calls other constructor Failure(String id, double occurrenceProbability)");
logger.severe("Exception: " + e.getMessage());
throw e;
}
}
}
|
diff --git a/src/main/java/com/happyelements/hive/web/Starter.java b/src/main/java/com/happyelements/hive/web/Starter.java
index df8ad9d..f87d42a 100755
--- a/src/main/java/com/happyelements/hive/web/Starter.java
+++ b/src/main/java/com/happyelements/hive/web/Starter.java
@@ -1,105 +1,106 @@
/*
* Copyright (c) 2012, someone All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1.Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer. 2.Redistributions in binary
* form must reproduce the above copyright notice, this list of conditions and
* the following disclaimer in the documentation and/or other materials provided
* with the distribution. 3.Neither the name of the Happyelements Ltd. nor the
* names of its contributors may be used to endorse or promote products derived
* from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*/
package com.happyelements.hive.web;
import java.io.File;
import java.io.IOException;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.log4j.ConsoleAppender;
import org.apache.log4j.Level;
import org.apache.log4j.Logger;
import org.apache.log4j.PatternLayout;
import org.apache.log4j.RollingFileAppender;
import com.happyelements.hive.web.api.GetQueryResult;
import com.happyelements.hive.web.api.GetUserQuerys;
import com.happyelements.hive.web.api.Kill;
import com.happyelements.hive.web.api.SubmitQuery;
/**
* @author <a href="mailto:[email protected]">kevin</a>
*
*/
public class Starter {
private static final Log LOGGER = LogFactory.getLog(Starter.class);
public static void initializeLogSystem(String log) throws IOException {
Starter.checkAndCreate(log);
Logger logger = Logger.getRootLogger();
logger.setLevel(Level.DEBUG);
logger.removeAllAppenders();
if (log != null) {
RollingFileAppender appender = new RollingFileAppender(
new PatternLayout("%d [%t] %-5p %c [%x] - %m%n"), new File(
log, "log.log").getPath());
appender.setMaxBackupIndex(10);
appender.setMaxFileSize("100MB");
logger.addAppender(appender);
} else {
logger.addAppender(new ConsoleAppender(new PatternLayout(
"%d [%t] %-5p %c [%x] - %m%n")));
}
}
public static void checkAndCreate(String path) throws IOException {
File file = new File(path);
if (file.exists()) {
if (!file.isDirectory()) {
throw new IOException(path + " is not directory");
}
} else if (!file.mkdirs()) {
throw new IOException("fail to create path:" + path);
}
}
public static void main(String[] args) {
try {
- if (args.length != 3) {
+ if (args.length != 4) {
System.out
.println("Usage ${hadoop} jar ${jar} ${static_root} ${log_root} ${port} ${default_url}");
return;
}
Starter.initializeLogSystem(args[1]);
Starter.LOGGER.info("initialize log system done");
Starter.LOGGER.info("starting http server at port:" + args[2]
- + " staticfiles:" + args[2] + " log_root:" + args[0]);
+ + " staticfiles:" + args[2] + " log_root:" + args[0]
+ + " defualturl:" + args[3]);
// construct and start server
- new HTTPServer(args[0], Integer.parseInt(args[2], 10), "/main.html") //
+ new HTTPServer(args[0], Integer.parseInt(args[2], 10), args[3]) //
.add(new SubmitQuery("/hwi/submitQuery.jsp", args[1])) //
.add(new GetQueryResult("/hwi/getQueryResult", args[1])) //
.add(new GetUserQuerys("/hwi/getUserQuerys.jsp")) //
.add(new Kill("/hwi/kill.jsp")).start();
} catch (Exception e) {
e.printStackTrace();
}
}
}
| false | true | public static void main(String[] args) {
try {
if (args.length != 3) {
System.out
.println("Usage ${hadoop} jar ${jar} ${static_root} ${log_root} ${port} ${default_url}");
return;
}
Starter.initializeLogSystem(args[1]);
Starter.LOGGER.info("initialize log system done");
Starter.LOGGER.info("starting http server at port:" + args[2]
+ " staticfiles:" + args[2] + " log_root:" + args[0]);
// construct and start server
new HTTPServer(args[0], Integer.parseInt(args[2], 10), "/main.html") //
.add(new SubmitQuery("/hwi/submitQuery.jsp", args[1])) //
.add(new GetQueryResult("/hwi/getQueryResult", args[1])) //
.add(new GetUserQuerys("/hwi/getUserQuerys.jsp")) //
.add(new Kill("/hwi/kill.jsp")).start();
} catch (Exception e) {
e.printStackTrace();
}
}
| public static void main(String[] args) {
try {
if (args.length != 4) {
System.out
.println("Usage ${hadoop} jar ${jar} ${static_root} ${log_root} ${port} ${default_url}");
return;
}
Starter.initializeLogSystem(args[1]);
Starter.LOGGER.info("initialize log system done");
Starter.LOGGER.info("starting http server at port:" + args[2]
+ " staticfiles:" + args[2] + " log_root:" + args[0]
+ " defualturl:" + args[3]);
// construct and start server
new HTTPServer(args[0], Integer.parseInt(args[2], 10), args[3]) //
.add(new SubmitQuery("/hwi/submitQuery.jsp", args[1])) //
.add(new GetQueryResult("/hwi/getQueryResult", args[1])) //
.add(new GetUserQuerys("/hwi/getUserQuerys.jsp")) //
.add(new Kill("/hwi/kill.jsp")).start();
} catch (Exception e) {
e.printStackTrace();
}
}
|
diff --git a/desktop/src/net/mms_projects/copy_it/app/CopyItDesktop.java b/desktop/src/net/mms_projects/copy_it/app/CopyItDesktop.java
index 06b32d3..b6b8f49 100644
--- a/desktop/src/net/mms_projects/copy_it/app/CopyItDesktop.java
+++ b/desktop/src/net/mms_projects/copy_it/app/CopyItDesktop.java
@@ -1,314 +1,316 @@
package net.mms_projects.copy_it.app;
import net.mms_projects.copy_it.*;
import net.mms_projects.copy_it.ApplicationLock.LockException;
import net.mms_projects.copy_it.api.ServerApi;
import net.mms_projects.copy_it.api.endpoints.ClipboardContentEndpoint;
import net.mms_projects.copy_it.clipboard_backends.SwtBackend;
import net.mms_projects.copy_it.clipboard_services.AwtService;
import net.mms_projects.copy_it.sync_services.ApiService;
import net.mms_projects.copy_it.sync_services.TestService;
import net.mms_projects.copy_it.ui.AbstractUi;
import net.mms_projects.copy_it.ui.ShellUi;
import net.mms_projects.copy_it.ui.SingleCommandUi;
import net.mms_projects.copy_it.ui.SwtGui;
import net.mms_projects.utils.OSValidator;
import org.apache.commons.io.FileUtils;
import org.eclipse.swt.widgets.Display;
import org.eclipse.swt.widgets.Shell;
import org.freedesktop.Notifications;
import org.freedesktop.dbus.DBusConnection;
import org.freedesktop.dbus.UInt32;
import org.freedesktop.dbus.Variant;
import org.freedesktop.dbus.exceptions.DBusException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.lang.reflect.Field;
import java.net.URL;
import java.util.*;
import java.util.concurrent.Executor;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
public class CopyItDesktop extends CopyIt {
static public DBusConnection dbusConnection;
static private boolean nativeLoadingInitialized;
private final Logger log = LoggerFactory.getLogger(this.getClass());
protected Settings settings;
/**
* @param args
*/
public static void main(String[] args) {
CopyItDesktop app = new CopyItDesktop();
app.run(args);
}
public static String getVersion() {
String version = "";
if (CopyItDesktop.class.getPackage().getSpecificationVersion() != null) {
version += CopyItDesktop.class.getPackage()
.getSpecificationVersion();
} else {
version += "0.0.1";
}
if (CopyItDesktop.getBuildNumber() != 0) {
version += "-" + CopyItDesktop.getBuildNumber();
}
return version;
}
public static int getBuildNumber() {
try {
return Integer.parseInt(CopyItDesktop.class.getPackage()
.getImplementationVersion());
} catch (NumberFormatException e) {
return 0;
}
}
public static File exportResource(String resource) {
Logger log = LoggerFactory.getLogger(CopyItDesktop.class);
if (!CopyItDesktop.nativeLoadingInitialized) {
System.setProperty(
"java.library.path",
System.getProperty("java.library.path")
+ System.getProperty("path.separator")
+ PathBuilder.getCacheDirectory().getAbsolutePath());
log.trace("Set the library path to: {}",
System.getProperty("java.library.path"));
Field fieldSysPath = null;
try {
fieldSysPath = ClassLoader.class.getDeclaredField("sys_paths");
fieldSysPath.setAccessible(true);
fieldSysPath.set(null, null);
} catch (SecurityException e2) {
// TODO Auto-generated catch block
e2.printStackTrace();
} catch (NoSuchFieldException e2) {
// TODO Auto-generated catch block
e2.printStackTrace();
} catch (IllegalArgumentException e) {
// TODO Auto-generated catch block
e.printStackTrace();
} catch (IllegalAccessException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
CopyItDesktop.nativeLoadingInitialized = true;
}
log.debug("Exporting resource {}", resource);
URL inputUrl = CopyItDesktop.class.getResource("/" + resource);
File dest = new File(PathBuilder.getCacheDirectory(), resource);
if (inputUrl == null) {
log.warn(
"No input resource available while exporting resource {}. Ignoring it.",
resource);
return null;
}
try {
FileUtils.copyURLToFile(inputUrl, dest);
} catch (IOException e1) {
log.warn("Could not copy resource. This might cause issues.", e1);
}
return dest;
}
public void run(String[] args) {
log.info("The application is launched");
this.settings = new Settings();
try {
this.settings.setFileStreamBuilder(new StreamBuilder());
} catch (IOException e) {
e.printStackTrace();
System.exit(1);
}
this.settings.loadProperties();
ExecutorService executor = Executors.newSingleThreadExecutor(Executors
.defaultThreadFactory());
final ClipboardManager clipboardManager = new ClipboardManager();
clipboardManager.setExecutor(executor);
final SyncManager syncManager = new SyncManager(clipboardManager);
syncManager.setExecutor(executor);
ServerApi api = new ServerApi();
api.deviceId = (settings.get("device.id") != null) ? UUID.fromString(settings.get("device.id")) : null;
api.devicePassword = settings.get("device.password");
TestService testService = new TestService(syncManager);
final ApiService apiService = new ApiService(syncManager,
new ClipboardContentEndpoint(api));
final SyncingThread syncThread = new SyncingThread(syncManager,
new ClipboardContentEndpoint(api));
SettingsListener apiServiceListener = new SettingsListener() {
@Override
public void onChange(String key, String value) {
ServerApi api = new ServerApi();
api.deviceId = UUID.fromString(settings.get("device.id"));
api.devicePassword = settings.get("device.password");
ClipboardContentEndpoint endpoint = new ClipboardContentEndpoint(
api);
apiService.setEndpoint(endpoint);
syncThread.setEndpoint(endpoint);
}
};
this.settings.addListener("device.id", apiServiceListener);
this.settings.addListener("device.password", apiServiceListener);
syncManager.addPushService(apiService);
syncManager.addPullService(apiService);
syncManager.addPushService(testService);
syncManager.addPullingService(syncThread);
syncManager.addPullingService(testService);
syncManager.addListener(new SyncListener() {
@Override
public void onPushed(String content, Date date) {
log.debug("The following content was pushed: {}", content);
}
@Override
public void onPulled(String content, Date date) {
log.debug("The following content was pulled: {}", content);
}
});
this.settings.addListener("sync.polling.enabled",
new SettingsListener() {
@Override
public void onChange(String key, String value) {
if (Boolean.parseBoolean(value)) {
syncManager.activatePolling();
clipboardManager.activatePolling();
log.debug("The sync manager and clipboard manager have been enabled");
} else {
syncManager.deactivatePolling();
clipboardManager.deactivatePolling();
log.debug("The sync manager and clipboard manager have been disabled");
}
}
});
if (this.settings.getBoolean("sync.polling.enabled")) {
syncManager.activatePolling();
clipboardManager.activatePolling();
} else {
syncManager.deactivatePolling();
clipboardManager.deactivatePolling();
}
if (OSValidator.isUnix()) {
this.exportResource("libunix-java.so");
try {
CopyItDesktop.dbusConnection = DBusConnection
.getConnection(DBusConnection.SESSION);
} catch (DBusException e1) {
// TODO Auto-generated catch block
log.error(
"Ahh could not connect to D-Bus. All kinds of explosions n'stuff. Fix it!",
e1);
e1.printStackTrace();
System.exit(1);
}
}
final ApplicationLock appLock = new ApplicationLock(
PathBuilder.getConfigDirectory());
if (appLock.isRunning()) {
String message = "An instance is already running. ";
if (OSValidator.isUnix()) {
try {
Notifications notify = CopyItDesktop.dbusConnection
.getRemoteObject("org.freedesktop.Notifications",
"/org/freedesktop/Notifications",
Notifications.class);
Map<String, Variant<Byte>> hints = new HashMap<String, Variant<Byte>>();
hints.put("urgency", new Variant<Byte>((byte) 2));
notify.Notify("CopyIt", new UInt32(0), "", "CopyIt",
message, new LinkedList<String>(), hints, -1);
} catch (DBusException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
log.info(message);
System.exit(0);
} else {
try {
appLock.lock();
} catch (LockException e) {
e.printStackTrace();
System.exit(1);
}
Runtime.getRuntime().addShutdownHook(new Thread() {
@Override
public void run() {
appLock.unlock();
}
});
}
AbstractUi ui = null;
if (args.length > 0) {
AwtService awtService = new AwtService(clipboardManager);
clipboardManager.addPasteService(awtService);
clipboardManager.addCopyService(awtService);
if ("cli".equalsIgnoreCase(args[0])) {
ui = new ShellUi(this.settings, syncManager, clipboardManager);
} else {
ui = new SingleCommandUi(this.settings, syncManager, clipboardManager, args[0]);
}
} else {
ui = new SwtGui(this.settings, syncManager, clipboardManager);
}
ui.open();
this.settings.saveProperties();
- CopyItDesktop.dbusConnection.disconnect();
+ if (OSValidator.isUnix()) {
+ CopyItDesktop.dbusConnection.disconnect();
+ }
executor.shutdown();
}
class StreamBuilder extends FileStreamBuilder {
private File settingsFile;
public StreamBuilder() throws IOException {
this.settingsFile = new File(PathBuilder.getConfigDirectory(),
"options.properties");
if (!this.settingsFile.exists()) {
log.info("No settings file. Creating it.");
this.settingsFile.createNewFile();
}
}
@Override
public FileInputStream getInputStream() throws IOException {
return new FileInputStream(this.settingsFile);
}
@Override
public FileOutputStream getOutputStream() throws IOException {
return new FileOutputStream(this.settingsFile);
}
}
}
| true | true | public void run(String[] args) {
log.info("The application is launched");
this.settings = new Settings();
try {
this.settings.setFileStreamBuilder(new StreamBuilder());
} catch (IOException e) {
e.printStackTrace();
System.exit(1);
}
this.settings.loadProperties();
ExecutorService executor = Executors.newSingleThreadExecutor(Executors
.defaultThreadFactory());
final ClipboardManager clipboardManager = new ClipboardManager();
clipboardManager.setExecutor(executor);
final SyncManager syncManager = new SyncManager(clipboardManager);
syncManager.setExecutor(executor);
ServerApi api = new ServerApi();
api.deviceId = (settings.get("device.id") != null) ? UUID.fromString(settings.get("device.id")) : null;
api.devicePassword = settings.get("device.password");
TestService testService = new TestService(syncManager);
final ApiService apiService = new ApiService(syncManager,
new ClipboardContentEndpoint(api));
final SyncingThread syncThread = new SyncingThread(syncManager,
new ClipboardContentEndpoint(api));
SettingsListener apiServiceListener = new SettingsListener() {
@Override
public void onChange(String key, String value) {
ServerApi api = new ServerApi();
api.deviceId = UUID.fromString(settings.get("device.id"));
api.devicePassword = settings.get("device.password");
ClipboardContentEndpoint endpoint = new ClipboardContentEndpoint(
api);
apiService.setEndpoint(endpoint);
syncThread.setEndpoint(endpoint);
}
};
this.settings.addListener("device.id", apiServiceListener);
this.settings.addListener("device.password", apiServiceListener);
syncManager.addPushService(apiService);
syncManager.addPullService(apiService);
syncManager.addPushService(testService);
syncManager.addPullingService(syncThread);
syncManager.addPullingService(testService);
syncManager.addListener(new SyncListener() {
@Override
public void onPushed(String content, Date date) {
log.debug("The following content was pushed: {}", content);
}
@Override
public void onPulled(String content, Date date) {
log.debug("The following content was pulled: {}", content);
}
});
this.settings.addListener("sync.polling.enabled",
new SettingsListener() {
@Override
public void onChange(String key, String value) {
if (Boolean.parseBoolean(value)) {
syncManager.activatePolling();
clipboardManager.activatePolling();
log.debug("The sync manager and clipboard manager have been enabled");
} else {
syncManager.deactivatePolling();
clipboardManager.deactivatePolling();
log.debug("The sync manager and clipboard manager have been disabled");
}
}
});
if (this.settings.getBoolean("sync.polling.enabled")) {
syncManager.activatePolling();
clipboardManager.activatePolling();
} else {
syncManager.deactivatePolling();
clipboardManager.deactivatePolling();
}
if (OSValidator.isUnix()) {
this.exportResource("libunix-java.so");
try {
CopyItDesktop.dbusConnection = DBusConnection
.getConnection(DBusConnection.SESSION);
} catch (DBusException e1) {
// TODO Auto-generated catch block
log.error(
"Ahh could not connect to D-Bus. All kinds of explosions n'stuff. Fix it!",
e1);
e1.printStackTrace();
System.exit(1);
}
}
final ApplicationLock appLock = new ApplicationLock(
PathBuilder.getConfigDirectory());
if (appLock.isRunning()) {
String message = "An instance is already running. ";
if (OSValidator.isUnix()) {
try {
Notifications notify = CopyItDesktop.dbusConnection
.getRemoteObject("org.freedesktop.Notifications",
"/org/freedesktop/Notifications",
Notifications.class);
Map<String, Variant<Byte>> hints = new HashMap<String, Variant<Byte>>();
hints.put("urgency", new Variant<Byte>((byte) 2));
notify.Notify("CopyIt", new UInt32(0), "", "CopyIt",
message, new LinkedList<String>(), hints, -1);
} catch (DBusException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
log.info(message);
System.exit(0);
} else {
try {
appLock.lock();
} catch (LockException e) {
e.printStackTrace();
System.exit(1);
}
Runtime.getRuntime().addShutdownHook(new Thread() {
@Override
public void run() {
appLock.unlock();
}
});
}
AbstractUi ui = null;
if (args.length > 0) {
AwtService awtService = new AwtService(clipboardManager);
clipboardManager.addPasteService(awtService);
clipboardManager.addCopyService(awtService);
if ("cli".equalsIgnoreCase(args[0])) {
ui = new ShellUi(this.settings, syncManager, clipboardManager);
} else {
ui = new SingleCommandUi(this.settings, syncManager, clipboardManager, args[0]);
}
} else {
ui = new SwtGui(this.settings, syncManager, clipboardManager);
}
ui.open();
this.settings.saveProperties();
CopyItDesktop.dbusConnection.disconnect();
executor.shutdown();
}
| public void run(String[] args) {
log.info("The application is launched");
this.settings = new Settings();
try {
this.settings.setFileStreamBuilder(new StreamBuilder());
} catch (IOException e) {
e.printStackTrace();
System.exit(1);
}
this.settings.loadProperties();
ExecutorService executor = Executors.newSingleThreadExecutor(Executors
.defaultThreadFactory());
final ClipboardManager clipboardManager = new ClipboardManager();
clipboardManager.setExecutor(executor);
final SyncManager syncManager = new SyncManager(clipboardManager);
syncManager.setExecutor(executor);
ServerApi api = new ServerApi();
api.deviceId = (settings.get("device.id") != null) ? UUID.fromString(settings.get("device.id")) : null;
api.devicePassword = settings.get("device.password");
TestService testService = new TestService(syncManager);
final ApiService apiService = new ApiService(syncManager,
new ClipboardContentEndpoint(api));
final SyncingThread syncThread = new SyncingThread(syncManager,
new ClipboardContentEndpoint(api));
SettingsListener apiServiceListener = new SettingsListener() {
@Override
public void onChange(String key, String value) {
ServerApi api = new ServerApi();
api.deviceId = UUID.fromString(settings.get("device.id"));
api.devicePassword = settings.get("device.password");
ClipboardContentEndpoint endpoint = new ClipboardContentEndpoint(
api);
apiService.setEndpoint(endpoint);
syncThread.setEndpoint(endpoint);
}
};
this.settings.addListener("device.id", apiServiceListener);
this.settings.addListener("device.password", apiServiceListener);
syncManager.addPushService(apiService);
syncManager.addPullService(apiService);
syncManager.addPushService(testService);
syncManager.addPullingService(syncThread);
syncManager.addPullingService(testService);
syncManager.addListener(new SyncListener() {
@Override
public void onPushed(String content, Date date) {
log.debug("The following content was pushed: {}", content);
}
@Override
public void onPulled(String content, Date date) {
log.debug("The following content was pulled: {}", content);
}
});
this.settings.addListener("sync.polling.enabled",
new SettingsListener() {
@Override
public void onChange(String key, String value) {
if (Boolean.parseBoolean(value)) {
syncManager.activatePolling();
clipboardManager.activatePolling();
log.debug("The sync manager and clipboard manager have been enabled");
} else {
syncManager.deactivatePolling();
clipboardManager.deactivatePolling();
log.debug("The sync manager and clipboard manager have been disabled");
}
}
});
if (this.settings.getBoolean("sync.polling.enabled")) {
syncManager.activatePolling();
clipboardManager.activatePolling();
} else {
syncManager.deactivatePolling();
clipboardManager.deactivatePolling();
}
if (OSValidator.isUnix()) {
this.exportResource("libunix-java.so");
try {
CopyItDesktop.dbusConnection = DBusConnection
.getConnection(DBusConnection.SESSION);
} catch (DBusException e1) {
// TODO Auto-generated catch block
log.error(
"Ahh could not connect to D-Bus. All kinds of explosions n'stuff. Fix it!",
e1);
e1.printStackTrace();
System.exit(1);
}
}
final ApplicationLock appLock = new ApplicationLock(
PathBuilder.getConfigDirectory());
if (appLock.isRunning()) {
String message = "An instance is already running. ";
if (OSValidator.isUnix()) {
try {
Notifications notify = CopyItDesktop.dbusConnection
.getRemoteObject("org.freedesktop.Notifications",
"/org/freedesktop/Notifications",
Notifications.class);
Map<String, Variant<Byte>> hints = new HashMap<String, Variant<Byte>>();
hints.put("urgency", new Variant<Byte>((byte) 2));
notify.Notify("CopyIt", new UInt32(0), "", "CopyIt",
message, new LinkedList<String>(), hints, -1);
} catch (DBusException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
log.info(message);
System.exit(0);
} else {
try {
appLock.lock();
} catch (LockException e) {
e.printStackTrace();
System.exit(1);
}
Runtime.getRuntime().addShutdownHook(new Thread() {
@Override
public void run() {
appLock.unlock();
}
});
}
AbstractUi ui = null;
if (args.length > 0) {
AwtService awtService = new AwtService(clipboardManager);
clipboardManager.addPasteService(awtService);
clipboardManager.addCopyService(awtService);
if ("cli".equalsIgnoreCase(args[0])) {
ui = new ShellUi(this.settings, syncManager, clipboardManager);
} else {
ui = new SingleCommandUi(this.settings, syncManager, clipboardManager, args[0]);
}
} else {
ui = new SwtGui(this.settings, syncManager, clipboardManager);
}
ui.open();
this.settings.saveProperties();
if (OSValidator.isUnix()) {
CopyItDesktop.dbusConnection.disconnect();
}
executor.shutdown();
}
|
diff --git a/src/org/hyperic/hq/hqapi1/tools/Shell.java b/src/org/hyperic/hq/hqapi1/tools/Shell.java
index 8db9b99..dc068c3 100644
--- a/src/org/hyperic/hq/hqapi1/tools/Shell.java
+++ b/src/org/hyperic/hq/hqapi1/tools/Shell.java
@@ -1,80 +1,81 @@
/*
*
* NOTE: This copyright does *not* cover user programs that use HQ
* program services by normal system calls through the application
* program interfaces provided as part of the Hyperic Plug-in Development
* Kit or the Hyperic Client Development Kit - this is merely considered
* normal use of the program, and does *not* fall under the heading of
* "derived work".
*
* Copyright (C) [2008, 2009], Hyperic, Inc.
* This file is part of HQ.
*
* HQ is free software; you can redistribute it and/or modify
* it under the terms version 2 of the GNU General Public License as
* published by the Free Software Foundation. This program is distributed
* in the hope that it will be useful, but WITHOUT ANY WARRANTY; without
* even the implied warranty of MERCHANTABILITY or FITNESS FOR A
* PARTICULAR PURPOSE. See the GNU General Public License for more
* details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307
* USA.
*
*/
package org.hyperic.hq.hqapi1.tools;
import java.util.Map;
import java.util.TreeMap;
public class Shell {
private static Map<String,Command> _commands = new TreeMap<String,Command>();
static {
_commands.put("agent", new AgentCommand());
_commands.put("alertdefinition", new AlertDefinitionCommand());
_commands.put("autodiscovery", new AutoDiscoveryCommand());
_commands.put("escalation", new EscalationCommand());
_commands.put("group", new GroupCommand());
_commands.put("metric", new MetricCommand());
_commands.put("metricData", new MetricDataCommand());
_commands.put("metricTemplate", new MetricTemplateCommand());
_commands.put("resource", new ResourceCommand());
_commands.put("role", new RoleCommand());
_commands.put("user", new UserCommand());
}
private static void printHelp() {
System.out.println("HQ Api Command Shell");
System.out.println("");
System.out.println("Available commands:");
for (String command : _commands.keySet()) {
System.out.println(" " + command);
}
}
public static void main(String[] args) throws Exception {
if (args.length == 0) {
printHelp();
System.exit(-1);
}
Command cmd = _commands.get(args[0]);
if (cmd == null) {
printHelp();
System.exit(-1);
}
try {
cmd.handleCommand(Command.trim(args));
} catch (Exception e) {
- System.out.println("Error running command: " + e.getMessage());
+ System.err.println("Error running command: " + e.getMessage());
+ e.printStackTrace(System.err);
System.exit(-1);
}
}
}
| true | true | public static void main(String[] args) throws Exception {
if (args.length == 0) {
printHelp();
System.exit(-1);
}
Command cmd = _commands.get(args[0]);
if (cmd == null) {
printHelp();
System.exit(-1);
}
try {
cmd.handleCommand(Command.trim(args));
} catch (Exception e) {
System.out.println("Error running command: " + e.getMessage());
System.exit(-1);
}
}
| public static void main(String[] args) throws Exception {
if (args.length == 0) {
printHelp();
System.exit(-1);
}
Command cmd = _commands.get(args[0]);
if (cmd == null) {
printHelp();
System.exit(-1);
}
try {
cmd.handleCommand(Command.trim(args));
} catch (Exception e) {
System.err.println("Error running command: " + e.getMessage());
e.printStackTrace(System.err);
System.exit(-1);
}
}
|
diff --git a/src/org/siraya/rent/user/service/SessionService.java b/src/org/siraya/rent/user/service/SessionService.java
index ab1b0b6..af112e5 100644
--- a/src/org/siraya/rent/user/service/SessionService.java
+++ b/src/org/siraya/rent/user/service/SessionService.java
@@ -1,42 +1,42 @@
package org.siraya.rent.user.service;
import org.siraya.rent.pojo.Session;
import org.siraya.rent.user.dao.ISessionDao;
import org.siraya.rent.user.dao.IDeviceDao;
import org.siraya.rent.utils.RentException;
import org.slf4j.Logger;
import org.siraya.rent.utils.RentException;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Propagation;
import org.springframework.transaction.annotation.Transactional;
@Service("sessionService")
public class SessionService implements ISessionService {
@Autowired
private ISessionDao sessionDao;
@Autowired
private IDeviceDao deviceDao;
private static Logger logger = LoggerFactory.getLogger(SessionService.class);
@Transactional(value = "rentTxManager", propagation = Propagation.SUPPORTS, readOnly = false, rollbackFor = java.lang.Throwable.class)
public void newSession(Session session) {
this.sessionDao.newSession(session);
int ret = this.deviceDao.updateLastLoginIp(session);
if (ret != 1) {
throw new RentException(RentException.RentErrorCode.ErrorNotFound,
- "update device not found");
+ "new session try to update device but update device not found");
}
}
public void setSessionDao(ISessionDao sessionDao) {
this.sessionDao = sessionDao;
}
public void setDeviceDao(IDeviceDao deviceDao) {
this.deviceDao = deviceDao;
}
}
| true | true | public void newSession(Session session) {
this.sessionDao.newSession(session);
int ret = this.deviceDao.updateLastLoginIp(session);
if (ret != 1) {
throw new RentException(RentException.RentErrorCode.ErrorNotFound,
"update device not found");
}
}
| public void newSession(Session session) {
this.sessionDao.newSession(session);
int ret = this.deviceDao.updateLastLoginIp(session);
if (ret != 1) {
throw new RentException(RentException.RentErrorCode.ErrorNotFound,
"new session try to update device but update device not found");
}
}
|
diff --git a/javasrc/src/org/ccnx/ccn/test/io/content/CCNNetworkObjectTest.java b/javasrc/src/org/ccnx/ccn/test/io/content/CCNNetworkObjectTest.java
index e06484eb9..8cb29228c 100644
--- a/javasrc/src/org/ccnx/ccn/test/io/content/CCNNetworkObjectTest.java
+++ b/javasrc/src/org/ccnx/ccn/test/io/content/CCNNetworkObjectTest.java
@@ -1,718 +1,723 @@
/*
* A CCNx library test.
*
* Copyright (C) 2008, 2009, 2011 Palo Alto Research Center, Inc.
*
* This work is free software; you can redistribute it and/or modify it under
* the terms of the GNU General Public License version 2 as published by the
* Free Software Foundation.
* This work is distributed in the hope that it will be useful, but WITHOUT ANY
* WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* for more details. You should have received a copy of the GNU General Public
* License along with this program; if not, write to the
* Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
* Boston, MA 02110-1301, USA.
*/
package org.ccnx.ccn.test.io.content;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InvalidObjectException;
import java.util.logging.Level;
import org.bouncycastle.util.Arrays;
import org.ccnx.ccn.CCNHandle;
import org.ccnx.ccn.impl.CCNFlowServer;
import org.ccnx.ccn.impl.CCNFlowControl.SaveType;
import org.ccnx.ccn.impl.security.crypto.util.DigestHelper;
import org.ccnx.ccn.impl.support.Log;
import org.ccnx.ccn.io.CCNVersionedInputStream;
import org.ccnx.ccn.io.content.CCNNetworkObject;
import org.ccnx.ccn.io.content.CCNStringObject;
import org.ccnx.ccn.io.content.Collection;
import org.ccnx.ccn.io.content.Link;
import org.ccnx.ccn.io.content.LinkAuthenticator;
import org.ccnx.ccn.io.content.UpdateListener;
import org.ccnx.ccn.io.content.Collection.CollectionObject;
import org.ccnx.ccn.profiles.SegmentationProfile;
import org.ccnx.ccn.profiles.VersioningProfile;
import org.ccnx.ccn.protocol.CCNTime;
import org.ccnx.ccn.protocol.ContentName;
import org.ccnx.ccn.protocol.ContentObject;
import org.ccnx.ccn.protocol.PublisherID;
import org.ccnx.ccn.protocol.SignedInfo;
import org.ccnx.ccn.protocol.PublisherID.PublisherType;
import org.ccnx.ccn.test.CCNTestHelper;
import org.ccnx.ccn.test.Flosser;
import org.junit.AfterClass;
import org.junit.Assert;
import org.junit.BeforeClass;
import org.junit.Test;
/**
* Test basic network object functionality, writing objects to a Flosser.
* Much slower than it needs to be -- seems to hit some kind of ordering
* bug which requires waiting for interest reexpression before it can go
* forward (shows up as mysterious 4-second delays in the log). The corresponding
* repo-backed test, CCNNetorkObjectTestRepo runs much faster to do exactly
* the same work.
* TODO track down slowness
*/
public class CCNNetworkObjectTest {
/**
* Handle naming for the test
*/
static CCNTestHelper testHelper = new CCNTestHelper(CCNNetworkObjectTest.class);
static String stringObjName = "StringObject";
static String collectionObjName = "CollectionObject";
static String prefix = "CollectionObject-";
static ContentName [] ns = null;
static public byte [] contenthash1 = new byte[32];
static public byte [] contenthash2 = new byte[32];
static public byte [] publisherid1 = new byte[32];
static public byte [] publisherid2 = new byte[32];
static PublisherID pubID1 = null;
static PublisherID pubID2 = null;
static int NUM_LINKS = 15;
static LinkAuthenticator [] las = new LinkAuthenticator[NUM_LINKS];
static Link [] lrs = null;
static Collection small1;
static Collection small2;
static Collection empty;
static Collection big;
static CCNHandle handle;
static String [] numbers = new String[]{"ONE", "TWO", "THREE", "FOUR", "FIVE", "SIX", "SEVEN", "EIGHT", "NINE", "TEN"};
static Level oldLevel;
static Flosser flosser = null;
static void setupNamespace(ContentName name) throws IOException {
flosser.handleNamespace(name);
}
static void removeNamespace(ContentName name) throws IOException {
flosser.stopMonitoringNamespace(name);
}
@AfterClass
public static void tearDownAfterClass() throws Exception {
try {
Log.info("Tearing down CCNNetworkObjectTest, prefix {0}", testHelper.getClassNamespace());
Log.flush();
Log.setDefaultLevel(oldLevel);
if (flosser != null) {
flosser.stop();
flosser = null;
}
Log.info("Finished tearing down CCNNetworkObjectTest, prefix {0}", testHelper.getClassNamespace());
Log.flush();
} catch (Exception e) {
Log.severe("Exception in tearDownAfterClass: type {0} msg {0}", e.getClass().getName(), e.getMessage());
Log.warningStackTrace(e);
}
}
@BeforeClass
public static void setUpBeforeClass() throws Exception {
Log.info("Setting up CCNNetworkObjectTest, prefix {0}", testHelper.getClassNamespace());
oldLevel = Log.getLevel();
Log.setDefaultLevel(Level.INFO);
handle = CCNHandle.open();
ns = new ContentName[NUM_LINKS];
for (int i=0; i < NUM_LINKS; ++i) {
ns[i] = ContentName.fromNative(testHelper.getClassNamespace(), "Links", prefix+Integer.toString(i));
}
Arrays.fill(publisherid1, (byte)6);
Arrays.fill(publisherid2, (byte)3);
pubID1 = new PublisherID(publisherid1, PublisherType.KEY);
pubID2 = new PublisherID(publisherid2, PublisherType.ISSUER_KEY);
las[0] = new LinkAuthenticator(pubID1);
las[1] = null;
las[2] = new LinkAuthenticator(pubID2, null, null,
SignedInfo.ContentType.DATA, contenthash1);
las[3] = new LinkAuthenticator(pubID1, null, CCNTime.now(),
null, contenthash1);
for (int j=4; j < NUM_LINKS; ++j) {
las[j] = new LinkAuthenticator(pubID2, null, CCNTime.now(), null, null);
}
lrs = new Link[NUM_LINKS];
for (int i=0; i < lrs.length; ++i) {
lrs[i] = new Link(ns[i],las[i]);
}
empty = new Collection();
small1 = new Collection();
small2 = new Collection();
for (int i=0; i < 5; ++i) {
small1.add(lrs[i]);
small2.add(lrs[i+5]);
}
big = new Collection();
for (int i=0; i < NUM_LINKS; ++i) {
big.add(lrs[i]);
}
flosser = new Flosser();
Log.info("Finished setting up CCNNetworkObjectTest, prefix is: {0}.", testHelper.getClassNamespace());
}
@AfterClass
public static void cleanupAfterClass() {
handle.close();
}
@Test
public void testVersioning() throws Exception {
// Testing problem of disappearing versions, inability to get latest. Use simpler
// object than a collection.
CCNHandle lput = CCNHandle.open();
CCNHandle lget = CCNHandle.open();
ContentName testName = ContentName.fromNative(testHelper.getTestNamespace("testVersioning"), stringObjName);
try {
CCNStringObject so = new CCNStringObject(testName, "First value", SaveType.RAW, lput);
setupNamespace(testName);
CCNStringObject ro = null;
CCNStringObject ro2 = null;
CCNStringObject ro3, ro4; // make each time, to get a new handle.
CCNTime soTime, srTime, sr2Time, sr3Time, sr4Time, so2Time;
for (int i=0; i < numbers.length; ++i) {
soTime = saveAndLog(numbers[i], so, null, numbers[i]);
if (null == ro) {
ro = new CCNStringObject(testName, lget);
srTime = waitForDataAndLog(numbers[i], ro);
} else {
srTime = updateAndLog(numbers[i], ro, null);
}
if (null == ro2) {
ro2 = new CCNStringObject(testName, null);
sr2Time = waitForDataAndLog(numbers[i], ro2);
} else {
sr2Time = updateAndLog(numbers[i], ro2, null);
}
ro3 = new CCNStringObject(ro.getVersionedName(), null); // read specific version
sr3Time = waitForDataAndLog("UpdateToROVersion", ro3);
// Save a new version and pull old
so2Time = saveAndLog(numbers[i] + "-Update", so, null, numbers[i] + "-Update");
ro4 = new CCNStringObject(ro.getVersionedName(), null); // read specific version
sr4Time = waitForDataAndLog("UpdateAnotherToROVersion", ro4);
System.out.println("Update " + i + ": Times: " + soTime + " " + srTime + " " + sr2Time + " " + sr3Time + " different: " + so2Time);
Assert.assertEquals("SaveTime doesn't match first read", soTime, srTime);
Assert.assertEquals("SaveTime doesn't match second read", soTime, sr2Time);
Assert.assertEquals("SaveTime doesn't match specific version read", soTime, sr3Time);
Assert.assertFalse("UpdateTime isn't newer than read time", soTime.equals(so2Time));
Assert.assertEquals("SaveTime doesn't match specific version read", soTime, sr4Time);
}
} finally {
removeNamespace(testName);
lput.close();
lget.close();
}
}
@Test
public void testSaveToVersion() throws Exception {
// Testing problem of disappearing versions, inability to get latest. Use simpler
// object than a collection.
CCNHandle lput = CCNHandle.open();
CCNHandle lget = CCNHandle.open();
ContentName testName = ContentName.fromNative(testHelper.getTestNamespace("testSaveToVersion"), stringObjName);
try {
CCNTime desiredVersion = CCNTime.now();
CCNStringObject so = new CCNStringObject(testName, "First value", SaveType.RAW, lput);
setupNamespace(testName);
saveAndLog("SpecifiedVersion", so, desiredVersion, "Time: " + desiredVersion);
Assert.assertEquals("Didn't write correct version", desiredVersion, so.getVersion());
CCNStringObject ro = new CCNStringObject(testName, lget);
ro.waitForData();
Assert.assertEquals("Didn't read correct version", desiredVersion, ro.getVersion());
ContentName versionName = ro.getVersionedName();
saveAndLog("UpdatedVersion", so, null, "ReplacementData");
updateAndLog("UpdatedData", ro, null);
Assert.assertTrue("New version " + so.getVersion() + " should be later than old version " + desiredVersion, (desiredVersion.before(so.getVersion())));
Assert.assertEquals("Didn't read correct version", so.getVersion(), ro.getVersion());
CCNStringObject ro2 = new CCNStringObject(versionName, null);
ro2.waitForData();
Assert.assertEquals("Didn't read correct version", desiredVersion, ro2.getVersion());
} finally {
removeNamespace(testName);
lput.close();
lget.close();
}
}
@Test
public void testEmptySave() throws Exception {
boolean caught = false;
ContentName testName = ContentName.fromNative(testHelper.getTestNamespace("testEmptySave"), collectionObjName);
try {
CollectionObject emptycoll =
new CollectionObject(testName, (Collection)null, SaveType.RAW, handle);
setupNamespace(testName);
try {
emptycoll.setData(small1); // set temporarily to non-null
saveAndLog("Empty", emptycoll, null, null);
} catch (InvalidObjectException iox) {
// this is what we expect to happen
caught = true;
}
Assert.assertTrue("Failed to produce expected exception.", caught);
} finally {
removeNamespace(testName);
}
}
@Test
public void testStreamUpdate() throws Exception {
ContentName testName = ContentName.fromNative(testHelper.getTestNamespace("testStreamUpdate"), collectionObjName);
CCNHandle tHandle = CCNHandle.open();
try {
CollectionObject testCollectionObject = new CollectionObject(testName, small1, SaveType.RAW, tHandle);
setupNamespace(testName);
saveAndLog("testStreamUpdate", testCollectionObject, null, small1);
System.out.println("testCollectionObject name: " + testCollectionObject.getVersionedName());
CCNVersionedInputStream vis = new CCNVersionedInputStream(testCollectionObject.getVersionedName());
ByteArrayOutputStream baos = new ByteArrayOutputStream();
byte [] buf = new byte[128];
// Will incur a timeout
while (!vis.eof()) {
int read = vis.read(buf);
if (read > 0)
baos.write(buf, 0, read);
}
System.out.println("Read " + baos.toByteArray().length + " bytes, digest: " +
DigestHelper.printBytes(DigestHelper.digest(baos.toByteArray()), 16));
Collection decodedData = new Collection();
decodedData.decode(baos.toByteArray());
System.out.println("Decoded collection data: " + decodedData);
Assert.assertEquals("Decoding via stream fails to give expected result!", decodedData, small1);
CCNVersionedInputStream vis2 = new CCNVersionedInputStream(testCollectionObject.getVersionedName());
ByteArrayOutputStream baos2 = new ByteArrayOutputStream();
// Will incur a timeout
while (!vis2.eof()) {
int val = vis2.read();
if (val < 0)
break;
baos2.write((byte)val);
}
System.out.println("Read " + baos2.toByteArray().length + " bytes, digest: " +
DigestHelper.printBytes(DigestHelper.digest(baos2.toByteArray()), 16));
Assert.assertArrayEquals("Reading same object twice gets different results!", baos.toByteArray(), baos2.toByteArray());
Collection decodedData2 = new Collection();
decodedData2.decode(baos2.toByteArray());
Assert.assertEquals("Decoding via stream byte read fails to give expected result!", decodedData2, small1);
CCNVersionedInputStream vis3 = new CCNVersionedInputStream(testCollectionObject.getVersionedName());
Collection decodedData3 = new Collection();
decodedData3.decode(vis3);
Assert.assertEquals("Decoding via stream full read fails to give expected result!", decodedData3, small1);
} finally {
removeNamespace(testName);
tHandle.close();
}
}
@Test
public void testVersionOrdering() throws Exception {
ContentName testName = ContentName.fromNative(testHelper.getTestNamespace("testVersionOrdering"), collectionObjName, "name1");
ContentName testName2 = ContentName.fromNative(testHelper.getTestNamespace("testVersionOrdering"), collectionObjName, "name2");
CCNHandle tHandle = CCNHandle.open();
try {
CollectionObject c0 = new CollectionObject(testName, empty, SaveType.RAW, handle);
setupNamespace(testName);
CCNTime t0 = saveAndLog("Empty", c0, null, empty);
CollectionObject c1 = new CollectionObject(testName2, small1, SaveType.RAW, tHandle);
CollectionObject c2 = new CollectionObject(testName2, small1, SaveType.RAW, null);
setupNamespace(testName2);
CCNTime t1 = saveAndLog("Small", c1, null, small1);
Assert.assertTrue("First version should come before second", t0.before(t1));
CCNTime t2 = saveAndLog("Small2ndWrite", c2, null, small1);
Assert.assertTrue("Third version should come after second", t1.before(t2));
Assert.assertTrue(c2.contentEquals(c1));
Assert.assertFalse(c2.equals(c1));
Assert.assertTrue(VersioningProfile.isLaterVersionOf(c2.getVersionedName(), c1.getVersionedName()));
} finally {
removeNamespace(testName);
removeNamespace(testName2);
tHandle.close();
}
}
@Test
public void testUpdateOtherName() throws Exception {
CCNHandle tHandle = CCNHandle.open();
ContentName testName = ContentName.fromNative(testHelper.getTestNamespace("testUpdateOtherName"), collectionObjName, "name1");
ContentName testName2 = ContentName.fromNative(testHelper.getTestNamespace("testUpdateOtherName"), collectionObjName, "name2");
try {
CollectionObject c0 = new CollectionObject(testName, empty, SaveType.RAW, handle);
setupNamespace(testName);
CCNTime t0 = saveAndLog("Empty", c0, null, empty);
CollectionObject c1 = new CollectionObject(testName2, small1, SaveType.RAW, tHandle);
// Cheat a little, make this one before the setupNamespace...
CollectionObject c2 = new CollectionObject(testName2, small1, SaveType.RAW, null);
setupNamespace(testName2);
CCNTime t1 = saveAndLog("Small", c1, null, small1);
Assert.assertTrue("First version should come before second", t0.before(t1));
CCNTime t2 = saveAndLog("Small2ndWrite", c2, null, small1);
Assert.assertTrue("Third version should come after second", t1.before(t2));
Assert.assertTrue(c2.contentEquals(c1));
Assert.assertFalse(c2.equals(c1));
CCNTime t3 = updateAndLog(c0.getVersionedName().toString(), c0, testName2);
Assert.assertTrue(VersioningProfile.isVersionOf(c0.getVersionedName(), testName2));
Assert.assertEquals(t3, t2);
Assert.assertTrue(c0.contentEquals(c2));
t3 = updateAndLog(c0.getVersionedName().toString(), c0, c1.getVersionedName());
Assert.assertTrue(VersioningProfile.isVersionOf(c0.getVersionedName(), testName2));
Assert.assertEquals(t3, t1);
Assert.assertTrue(c0.contentEquals(c1));
} finally {
removeNamespace(testName);
removeNamespace(testName2);
tHandle.close();
}
}
@Test
public void testUpdateInBackground() throws Exception {
CCNHandle tHandle = CCNHandle.open();
CCNHandle tHandle2 = CCNHandle.open();
CCNHandle tHandle3 = CCNHandle.open();
ContentName testName = ContentName.fromNative(testHelper.getTestNamespace("testUpdateInBackground"), stringObjName, "name1");
try {
CCNStringObject c0 = new CCNStringObject(testName, (String)null, SaveType.RAW, tHandle);
c0.updateInBackground();
CCNStringObject c1 = new CCNStringObject(testName, (String)null, SaveType.RAW, tHandle2);
c1.updateInBackground(true);
Assert.assertFalse(c0.available());
Assert.assertFalse(c0.isSaved());
Assert.assertFalse(c1.available());
Assert.assertFalse(c1.isSaved());
CCNStringObject c2 = new CCNStringObject(testName, (String)null, SaveType.RAW, tHandle3);
CCNTime t1 = saveAndLog("First string", c2, null, "Here is the first string.");
Log.info("Saved c2: " + c2.getVersionedName() + " c0 available? " + c0.available() + " c1 available? " + c1.available());
c0.waitForData();
Assert.assertEquals("c0 update", c0.getVersion(), c2.getVersion());
c1.waitForData();
Assert.assertEquals("c1 update", c1.getVersion(), c2.getVersion());
CCNTime t2 = saveAndLog("Second string", c2, null, "Here is the second string.");
synchronized (c1) {
if (!c1.getVersion().equals(t2)) {
c1.wait(5000);
}
}
Assert.assertEquals("c1 update 2", c1.getVersion(), c2.getVersion());
Assert.assertEquals("c0 unchanged", c0.getVersion(), t1);
} finally {
removeNamespace(testName);
tHandle.close();
tHandle2.close();
tHandle3.close();
}
}
@Test
public void testBackgroundVerifier() throws Exception {
ContentName testName = ContentName.fromNative(testHelper.getTestNamespace("testBackgroundVerifier"), stringObjName, "name1");
try {
CCNStringObject c0 = new CCNStringObject(testName, (String)null, SaveType.RAW, CCNHandle.open());
c0.updateInBackground(true);
CCNStringObject c1 = new CCNStringObject(testName, (String)null, SaveType.RAW, CCNHandle.open());
c1.updateInBackground(true);
CCNTime t1 = saveAndLog("First string", c0, null, "Here is the first string.");
- c0.waitForData();
+ synchronized (c0) {
+ if (!c0.getVersion().equals(t1)) {
+ c0.wait(5000);
+ }
+ }
c1.waitForData();
CCNTime c1Version = c1.getVersion();
Assert.assertTrue(c0.available());
Assert.assertTrue(c0.isSaved());
Assert.assertTrue(c1.available());
Assert.assertTrue(c1.isSaved());
Assert.assertEquals(t1, c1Version);
// Test background ability to throw away bogus data.
// change the version so a) it's later, and b) the signature won't verify
ContentName laterName = SegmentationProfile.segmentName(VersioningProfile.updateVersion(c1.getVersionedName()),
SegmentationProfile.baseSegment());
CCNFlowServer server = new CCNFlowServer(testName, null, false, CCNHandle.open());
server.addNameSpace(laterName);
ContentObject bogon =
new ContentObject(laterName, c0.getFirstSegment().signedInfo(),
c0.getFirstSegment().content(), c0.getFirstSegment().signature());
Log.info("Writing bogon: {0}", bogon.fullName());
server.put(bogon);
Thread.sleep(300);
// Should be no update
Assert.assertEquals(c0.getVersion(), c1Version);
Assert.assertEquals(c1.getVersion(), c1Version);
// Now write a newer one
CCNStringObject c2 = new CCNStringObject(testName, (String)null, SaveType.RAW, CCNHandle.open());
CCNTime t2 = saveAndLog("Second string", c2, null, "Here is the second string.");
Log.info("Saved c2: " + c2.getVersionedName() + " c0 available? " + c0.available() + " c1 available? " + c1.available());
synchronized (c0) {
if (!c0.getVersion().equals(t2)) {
c0.wait(5000);
}
+ Log.info("waited - t2 is {0}", t2);
}
Assert.assertEquals("c0 update", c0.getVersion(), c2.getVersion());
synchronized (c1) {
if (!c1.getVersion().equals(t2)) {
c1.wait(5000);
}
}
Assert.assertEquals("c1 update", c1.getVersion(), c2.getVersion());
Assert.assertFalse(c1Version.equals(c1.getVersion()));
} finally {
removeNamespace(testName);
}
}
@Test
public void testSaveAsGone() throws Exception {
ContentName testName = ContentName.fromNative(testHelper.getTestNamespace("testSaveAsGone"), collectionObjName);
CCNHandle tHandle = CCNHandle.open();
CCNHandle tHandle2 = CCNHandle.open();
try {
Log.info("TSAG: Entering testSaveAsGone");
CollectionObject c0 = new CollectionObject(testName, empty, SaveType.RAW, handle);
setupNamespace(testName); // this sends the interest, doing it after the object gives it
// a chance to catch it.
CCNTime t0 = saveAsGoneAndLog("FirstGoneSave", c0);
Assert.assertTrue("Should be gone", c0.isGone());
ContentName goneVersionName = c0.getVersionedName();
Log.info("T1");
CCNTime t1 = saveAndLog("NotGone", c0, null, small1);
Assert.assertFalse("Should not be gone", c0.isGone());
Assert.assertTrue(t1.after(t0));
Log.info("T2");
CollectionObject c1 = new CollectionObject(testName, tHandle);
CCNTime t2 = waitForDataAndLog(testName.toString(), c1);
Assert.assertFalse("Read back should not be gone", c1.isGone());
Assert.assertEquals(t2, t1);
Log.info("T3");
CCNTime t3 = updateAndLog(goneVersionName.toString(), c1, goneVersionName);
Assert.assertTrue(VersioningProfile.isVersionOf(c1.getVersionedName(), testName));
Assert.assertEquals(t3, t0);
Assert.assertTrue("Read back should be gone.", c1.isGone());
Log.info("T4");
t0 = saveAsGoneAndLog("GoneAgain", c0);
Assert.assertTrue("Should be gone", c0.isGone());
Log.info("TSAG: Updating new object: {0}", testName);
CollectionObject c2 = new CollectionObject(testName, tHandle2);
Log.info("TSAG: Waiting for: {0}", testName);
CCNTime t4 = waitForDataAndLog(testName.toString(), c2);
Log.info("TSAG: Waited for: {0}", c2.getVersionedName());
Assert.assertTrue("Read back of " + c0.getVersionedName() + " should be gone, got " + c2.getVersionedName(), c2.isGone());
Assert.assertEquals(t4, t0);
Log.info("TSAG: Leaving testSaveAsGone.");
} finally {
removeNamespace(testName);
tHandle.close();
tHandle2.close();
}
}
@Test
public void testUpdateDoesNotExist() throws Exception {
ContentName testName = ContentName.fromNative(testHelper.getTestNamespace("testUpdateDoesNotExist"), collectionObjName);
CCNHandle tHandle = CCNHandle.open();
try {
Log.info("CCNNetworkObjectTest: Entering testUpdateDoesNotExist");
CCNStringObject so = new CCNStringObject(testName, handle);
// so should catch exception thrown by underlying stream when it times out.
Assert.assertFalse(so.available());
// try to pick up anything that happens to appear
so.updateInBackground();
CCNStringObject sowrite = new CCNStringObject(testName, "Now we write something.", SaveType.RAW, tHandle);
setupNamespace(testName);
saveAndLog("testUpdateDoesNotExist: Delayed write", sowrite, null, "Now we write something.");
Log.flush();
so.waitForData();
Assert.assertTrue(so.available());
Assert.assertEquals(so.string(), sowrite.string());
Assert.assertEquals(so.getVersionedName(), sowrite.getVersionedName());
Log.info("CCNNetworkObjectTest: Leaving testUpdateDoesNotExist");
Log.flush();
} finally {
removeNamespace(testName);
tHandle.close();
}
}
@Test
public void testFirstSegmentInfo() throws Exception {
// Testing for matching info about first segment.
CCNHandle lput = CCNHandle.open();
CCNHandle lget = CCNHandle.open();
ContentName testName = ContentName.fromNative(testHelper.getTestNamespace("testFirstSegmentInfo"), stringObjName);
try {
CCNTime desiredVersion = CCNTime.now();
CCNStringObject so = new CCNStringObject(testName, "First value", SaveType.RAW, lput);
setupNamespace(testName);
saveAndLog("SpecifiedVersion", so, desiredVersion, "Time: " + desiredVersion);
Assert.assertEquals("Didn't write correct version", desiredVersion, so.getVersion());
CCNStringObject ro = new CCNStringObject(testName, lget);
ro.waitForData();
Assert.assertEquals("Didn't read correct version", desiredVersion, ro.getVersion());
Assert.assertEquals("Didn't match first segment number", so.firstSegmentNumber(), ro.firstSegmentNumber());
Assert.assertArrayEquals("Didn't match first segment digest", so.getFirstDigest(), ro.getFirstDigest());
} finally {
removeNamespace(testName);
lput.close();
lget.close();
}
}
static class CounterListener implements UpdateListener {
protected Integer _callbackCounter = 0;
public int getCounter() { return _callbackCounter; }
public void newVersionAvailable(CCNNetworkObject<?> newVersion, boolean wasSave) {
synchronized (_callbackCounter) {
_callbackCounter++;
if (Log.isLoggable(Level.INFO)) {
Log.info("UPDATE CALLBACK: counter is " + _callbackCounter + " was save? " + wasSave);
}
}
}
}
@Test
public void testUpdateListener() throws Exception {
SaveType saveType = SaveType.RAW;
CCNHandle writeHandle = CCNHandle.open();
CCNHandle readHandle = CCNHandle.open();
ContentName testName = ContentName.fromNative(testHelper.getTestNamespace("testUpdateListener"),
stringObjName);
CounterListener ourListener = new CounterListener();
CCNStringObject readObject =
new CCNStringObject(testName, null, null, readHandle);
readObject.addListener(ourListener);
setupNamespace(testName);
CCNStringObject writeObject =
new CCNStringObject(testName, "Something to listen to.", saveType, writeHandle);
writeObject.save();
boolean result = readObject.update();
Assert.assertTrue(result);
Assert.assertTrue(ourListener.getCounter() == 1);
readObject.updateInBackground();
writeObject.save("New stuff! New stuff!");
synchronized(readObject) {
if (ourListener.getCounter() == 1)
readObject.wait();
}
// For some reason, we're getting two updates on our updateInBackground...
Assert.assertTrue(ourListener.getCounter() > 1);
writeHandle.close();
readHandle.close();
}
@Test
public void testVeryLast() throws Exception {
Log.info("CCNNetworkObjectTest: Entering testVeryLast -- dummy test to help track down blowup. Prefix {0}", testHelper.getClassNamespace());
Thread.sleep(1000);
Log.info("CCNNetworkObjectTest: Leaving testVeryLast -- dummy test to help track down blowup. Prefix {0}", testHelper.getClassNamespace());
}
public <T> CCNTime saveAndLog(String name, CCNNetworkObject<T> ecd, CCNTime version, T data) throws IOException {
CCNTime oldVersion = ecd.getVersion();
ecd.save(version, data);
Log.info("SAL: Saved " + name + ": " + ecd.getVersionedName() + " (" + ecd.getVersion() + ", updated from " + oldVersion + ")" + " gone? " + ecd.isGone() + " data: " + ecd);
return ecd.getVersion();
}
public <T> CCNTime saveAsGoneAndLog(String name, CCNNetworkObject<T> ecd) throws IOException {
CCNTime oldVersion = ecd.getVersion();
ecd.saveAsGone();
Log.info("SAGAL Saved " + name + ": " + ecd.getVersionedName() + " (" + ecd.getVersion() + ", updated from " + oldVersion + ")" + " gone? " + ecd.isGone() + " data: " + ecd);
return ecd.getVersion();
}
public CCNTime waitForDataAndLog(String name, CCNNetworkObject<?> ecd) throws IOException {
ecd.waitForData();
Log.info("WFDAL: Initial read " + name + ", name: " + ecd.getVersionedName() + " (" + ecd.getVersion() +")" + " gone? " + ecd.isGone() + " data: " + ecd);
return ecd.getVersion();
}
public CCNTime updateAndLog(String name, CCNNetworkObject<?> ecd, ContentName updateName) throws IOException {
if ((null == updateName) ? ecd.update() : ecd.update(updateName, null))
Log.info("Updated " + name + ", to name: " + ecd.getVersionedName() + " (" + ecd.getVersion() +")" + " gone? " + ecd.isGone() + " data: " + ecd);
else
Log.info("UAL: No update found for " + name + ((null != updateName) ? (" at name " + updateName) : "") + ", still: " + ecd.getVersionedName() + " (" + ecd.getVersion() +")" + " gone? " + ecd.isGone() + " data: " + ecd);
return ecd.getVersion();
}
}
| false | true | public void testBackgroundVerifier() throws Exception {
// Verifies that objects tracking a name via updateInBackground() pick up a
// new version, discard later content whose signature does not verify, and
// then accept a subsequent valid version.
ContentName testName = ContentName.fromNative(testHelper.getTestNamespace("testBackgroundVerifier"), stringObjName, "name1");
try {
CCNStringObject c0 = new CCNStringObject(testName, (String)null, SaveType.RAW, CCNHandle.open());
c0.updateInBackground(true);
CCNStringObject c1 = new CCNStringObject(testName, (String)null, SaveType.RAW, CCNHandle.open());
c1.updateInBackground(true);
CCNTime t1 = saveAndLog("First string", c0, null, "Here is the first string.");
// Wait until c0's background updater has actually reached version t1.
// waitForData() alone can return before the update lands, so the
// version assertions below would race the updater thread.
synchronized (c0) {
if (!c0.getVersion().equals(t1)) {
c0.wait(5000);
}
}
c1.waitForData();
CCNTime c1Version = c1.getVersion();
Assert.assertTrue(c0.available());
Assert.assertTrue(c0.isSaved());
Assert.assertTrue(c1.available());
Assert.assertTrue(c1.isSaved());
Assert.assertEquals(t1, c1Version);
// Test background ability to throw away bogus data.
// change the version so a) it's later, and b) the signature won't verify
ContentName laterName = SegmentationProfile.segmentName(VersioningProfile.updateVersion(c1.getVersionedName()),
SegmentationProfile.baseSegment());
CCNFlowServer server = new CCNFlowServer(testName, null, false, CCNHandle.open());
server.addNameSpace(laterName);
ContentObject bogon =
new ContentObject(laterName, c0.getFirstSegment().signedInfo(),
c0.getFirstSegment().content(), c0.getFirstSegment().signature());
Log.info("Writing bogon: {0}", bogon.fullName());
server.put(bogon);
Thread.sleep(300);
// Should be no update
Assert.assertEquals(c0.getVersion(), c1Version);
Assert.assertEquals(c1.getVersion(), c1Version);
// Now write a newer one
CCNStringObject c2 = new CCNStringObject(testName, (String)null, SaveType.RAW, CCNHandle.open());
CCNTime t2 = saveAndLog("Second string", c2, null, "Here is the second string.");
Log.info("Saved c2: " + c2.getVersionedName() + " c0 available? " + c0.available() + " c1 available? " + c1.available());
synchronized (c0) {
if (!c0.getVersion().equals(t2)) {
c0.wait(5000);
}
Log.info("waited - t2 is {0}", t2);
}
Assert.assertEquals("c0 update", c0.getVersion(), c2.getVersion());
synchronized (c1) {
if (!c1.getVersion().equals(t2)) {
c1.wait(5000);
}
}
Assert.assertEquals("c1 update", c1.getVersion(), c2.getVersion());
Assert.assertFalse(c1Version.equals(c1.getVersion()));
} finally {
removeNamespace(testName);
}
}
| public void testBackgroundVerifier() throws Exception {
// Verifies that objects tracking a name via updateInBackground() pick up a
// new version, discard later content whose signature does not verify, and
// then accept a subsequent valid version.
ContentName testName = ContentName.fromNative(testHelper.getTestNamespace("testBackgroundVerifier"), stringObjName, "name1");
try {
CCNStringObject c0 = new CCNStringObject(testName, (String)null, SaveType.RAW, CCNHandle.open());
c0.updateInBackground(true);
CCNStringObject c1 = new CCNStringObject(testName, (String)null, SaveType.RAW, CCNHandle.open());
c1.updateInBackground(true);
CCNTime t1 = saveAndLog("First string", c0, null, "Here is the first string.");
// Wait for c0's background updater to actually reach version t1 before
// asserting on versions; waitForData() alone can return too early.
synchronized (c0) {
if (!c0.getVersion().equals(t1)) {
c0.wait(5000);
}
}
c1.waitForData();
CCNTime c1Version = c1.getVersion();
Assert.assertTrue(c0.available());
Assert.assertTrue(c0.isSaved());
Assert.assertTrue(c1.available());
Assert.assertTrue(c1.isSaved());
Assert.assertEquals(t1, c1Version);
// Test background ability to throw away bogus data.
// change the version so a) it's later, and b) the signature won't verify
ContentName laterName = SegmentationProfile.segmentName(VersioningProfile.updateVersion(c1.getVersionedName()),
SegmentationProfile.baseSegment());
CCNFlowServer server = new CCNFlowServer(testName, null, false, CCNHandle.open())
server.addNameSpace(laterName);
ContentObject bogon =
new ContentObject(laterName, c0.getFirstSegment().signedInfo(),
c0.getFirstSegment().content(), c0.getFirstSegment().signature());
Log.info("Writing bogon: {0}", bogon.fullName());
server.put(bogon);
Thread.sleep(300);
// Should be no update
Assert.assertEquals(c0.getVersion(), c1Version);
Assert.assertEquals(c1.getVersion(), c1Version);
// Now write a newer one
CCNStringObject c2 = new CCNStringObject(testName, (String)null, SaveType.RAW, CCNHandle.open());
CCNTime t2 = saveAndLog("Second string", c2, null, "Here is the second string.");
Log.info("Saved c2: " + c2.getVersionedName() + " c0 available? " + c0.available() + " c1 available? " + c1.available());
synchronized (c0) {
if (!c0.getVersion().equals(t2)) {
c0.wait(5000);
}
Log.info("waited - t2 is {0}", t2);
}
Assert.assertEquals("c0 update", c0.getVersion(), c2.getVersion());
synchronized (c1) {
if (!c1.getVersion().equals(t2)) {
c1.wait(5000);
}
}
Assert.assertEquals("c1 update", c1.getVersion(), c2.getVersion());
Assert.assertFalse(c1Version.equals(c1.getVersion()));
} finally {
removeNamespace(testName);
}
}
|
diff --git a/svnkit/src/main/java/org/tmatesoft/svn/core/internal/wc2/remote/SvnRemoteRemoteDelete.java b/svnkit/src/main/java/org/tmatesoft/svn/core/internal/wc2/remote/SvnRemoteRemoteDelete.java
index 5d6f75463..049a6c65a 100644
--- a/svnkit/src/main/java/org/tmatesoft/svn/core/internal/wc2/remote/SvnRemoteRemoteDelete.java
+++ b/svnkit/src/main/java/org/tmatesoft/svn/core/internal/wc2/remote/SvnRemoteRemoteDelete.java
@@ -1,146 +1,146 @@
package org.tmatesoft.svn.core.internal.wc2.remote;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import org.tmatesoft.svn.core.SVNCommitInfo;
import org.tmatesoft.svn.core.SVNErrorCode;
import org.tmatesoft.svn.core.SVNErrorMessage;
import org.tmatesoft.svn.core.SVNException;
import org.tmatesoft.svn.core.SVNNodeKind;
import org.tmatesoft.svn.core.SVNURL;
import org.tmatesoft.svn.core.internal.util.SVNHashMap;
import org.tmatesoft.svn.core.internal.util.SVNPathUtil;
import org.tmatesoft.svn.core.internal.wc.ISVNCommitPathHandler;
import org.tmatesoft.svn.core.internal.wc.SVNCommitUtil;
import org.tmatesoft.svn.core.internal.wc.SVNErrorManager;
import org.tmatesoft.svn.core.internal.wc.SVNEventFactory;
import org.tmatesoft.svn.core.internal.wc.SVNPropertiesManager;
import org.tmatesoft.svn.core.internal.wc17.SVNWCUtils;
import org.tmatesoft.svn.core.internal.wc2.SvnRemoteOperationRunner;
import org.tmatesoft.svn.core.io.ISVNEditor;
import org.tmatesoft.svn.core.io.SVNRepository;
import org.tmatesoft.svn.core.wc.ISVNEventHandler;
import org.tmatesoft.svn.core.wc.SVNEventAction;
import org.tmatesoft.svn.core.wc2.SvnCommitItem;
import org.tmatesoft.svn.core.wc2.SvnRemoteDelete;
import org.tmatesoft.svn.core.wc2.SvnTarget;
import org.tmatesoft.svn.util.SVNLogType;
public class SvnRemoteRemoteDelete extends SvnRemoteOperationRunner<SVNCommitInfo, SvnRemoteDelete> {
@Override
protected SVNCommitInfo run() throws SVNException {
if (getOperation().getTargets().size() == 0) {
return SVNCommitInfo.NULL;
}
SVNHashMap reposInfo = new SVNHashMap();
SVNHashMap relPathInfo = new SVNHashMap();
for (SvnTarget target : getOperation().getTargets()) {
SVNURL url = target.getURL();
SVNRepository repository = null;
SVNURL reposRoot = null;
String reposRelPath = null;
ArrayList<String> relPaths;
SVNNodeKind kind;
for (Iterator rootUrls = reposInfo.keySet().iterator(); rootUrls.hasNext();) {
reposRoot = (SVNURL) rootUrls.next();
reposRelPath = SVNWCUtils.isChild(reposRoot, url);
if (reposRelPath != null) {
repository = (SVNRepository)reposInfo.get(reposRoot);
relPaths = (ArrayList<String>)relPathInfo.get(reposRoot);
relPaths.add(reposRelPath);
}
}
if (repository == null) {
- repository = getRepositoryAccess().createRepository(url, null, true);
+ repository = getRepositoryAccess().createRepository(url, null, false);
reposRoot = repository.getRepositoryRoot(true);
repository.setLocation(reposRoot, false);
reposInfo.put(reposRoot, repository);
reposRelPath = SVNWCUtils.isChild(reposRoot, url);
relPaths = new ArrayList<String>();
relPathInfo.put(reposRoot, relPaths);
relPaths.add(reposRelPath);
}
kind = repository.checkPath(reposRelPath, -1);
if (kind == SVNNodeKind.NONE) {
- SVNErrorMessage err = SVNErrorMessage.create(SVNErrorCode.FS_NOT_FOUND, "URL '%s' does not exist", url);
+ SVNErrorMessage err = SVNErrorMessage.create(SVNErrorCode.FS_NOT_FOUND, "URL '{0}' does not exist", url);
SVNErrorManager.error(err, SVNLogType.WC);
}
}
SVNPropertiesManager.validateRevisionProperties(getOperation().getRevisionProperties());
SVNCommitInfo info = null;
for (Iterator rootUrls = reposInfo.keySet().iterator(); rootUrls.hasNext();) {
SVNURL reposRoot = (SVNURL) rootUrls.next();
SVNRepository repository = (SVNRepository)reposInfo.get(reposRoot);
ArrayList<String> paths = (ArrayList<String>)relPathInfo.get(reposRoot);
info = singleRepositoryDelete(repository, reposRoot, paths);
if (info != null) {
getOperation().receive(SvnTarget.fromURL(reposRoot), info);
}
}
return info != null ? info : SVNCommitInfo.NULL;
}
private SVNCommitInfo singleRepositoryDelete(SVNRepository repository, SVNURL rootURL, List<String> paths) throws SVNException {
if (paths.isEmpty()) {
paths.add(SVNPathUtil.tail(rootURL.getURIEncodedPath()));
rootURL = rootURL.removePathTail();
}
String commitMessage;
if (getOperation().getCommitHandler() != null) {
SvnCommitItem[] commitItems = new SvnCommitItem[paths.size()];
for (int i = 0; i < commitItems.length; i++) {
String path = (String) paths.get(i);
SvnCommitItem item = new SvnCommitItem();
item.setKind(SVNNodeKind.NONE);
item.setUrl(rootURL.appendPath(path, true));
item.setFlags(SvnCommitItem.DELETE);
commitItems[i] = item;
}
commitMessage = getOperation().getCommitHandler().getCommitMessage(getOperation().getCommitMessage(), commitItems);
if (commitMessage == null) {
return SVNCommitInfo.NULL;
}
commitMessage = SVNCommitUtil.validateCommitMessage(commitMessage);
}
else {
commitMessage = "";
}
ISVNEditor commitEditor = repository.getCommitEditor(commitMessage, null, false, getOperation().getRevisionProperties(), null);
ISVNCommitPathHandler deleter = new ISVNCommitPathHandler() {
public boolean handleCommitPath(String commitPath, ISVNEditor commitEditor) throws SVNException {
commitEditor.deleteEntry(commitPath, -1);
return false;
}
};
SVNCommitInfo info;
try {
SVNCommitUtil.driveCommitEditor(deleter, paths, commitEditor, -1);
info = commitEditor.closeEdit();
} catch (SVNException e) {
try {
commitEditor.abortEdit();
} catch (SVNException inner) {
}
throw e;
}
if (info != null && info.getNewRevision() >= 0) {
handleEvent(SVNEventFactory.createSVNEvent(null, SVNNodeKind.NONE, null, info.getNewRevision(), SVNEventAction.COMMIT_COMPLETED, null, null, null), ISVNEventHandler.UNKNOWN);
}
return info != null ? info : SVNCommitInfo.NULL;
}
}
| false | true | protected SVNCommitInfo run() throws SVNException {
// Deletes the operation's target URLs directly in the repository, grouping
// targets by repository root so each root is committed exactly once.
// Returns the last commit's info, or SVNCommitInfo.NULL if nothing ran.
if (getOperation().getTargets().size() == 0) {
return SVNCommitInfo.NULL;
}
// repository root URL -> open SVNRepository session / relative paths to delete
SVNHashMap reposInfo = new SVNHashMap();
SVNHashMap relPathInfo = new SVNHashMap();
for (SvnTarget target : getOperation().getTargets()) {
SVNURL url = target.getURL();
SVNRepository repository = null;
SVNURL reposRoot = null;
String reposRelPath = null;
ArrayList<String> relPaths;
SVNNodeKind kind;
// Reuse an already-open session whose root is an ancestor of this URL.
for (Iterator rootUrls = reposInfo.keySet().iterator(); rootUrls.hasNext();) {
reposRoot = (SVNURL) rootUrls.next();
reposRelPath = SVNWCUtils.isChild(reposRoot, url);
if (reposRelPath != null) {
repository = (SVNRepository)reposInfo.get(reposRoot);
relPaths = (ArrayList<String>)relPathInfo.get(reposRoot);
relPaths.add(reposRelPath);
}
}
if (repository == null) {
// Open a dedicated session (mayReuse=false): its location is rebased
// to the repository root below, so it must not be a shared session.
repository = getRepositoryAccess().createRepository(url, null, false);
reposRoot = repository.getRepositoryRoot(true);
repository.setLocation(reposRoot, false);
reposInfo.put(reposRoot, repository);
reposRelPath = SVNWCUtils.isChild(reposRoot, url);
relPaths = new ArrayList<String>();
relPathInfo.put(reposRoot, relPaths);
relPaths.add(reposRelPath);
}
kind = repository.checkPath(reposRelPath, -1);
if (kind == SVNNodeKind.NONE) {
// SVNErrorMessage uses MessageFormat-style "{0}" placeholders, not printf "%s".
SVNErrorMessage err = SVNErrorMessage.create(SVNErrorCode.FS_NOT_FOUND, "URL '{0}' does not exist", url);
SVNErrorManager.error(err, SVNLogType.WC);
}
}
SVNPropertiesManager.validateRevisionProperties(getOperation().getRevisionProperties());
SVNCommitInfo info = null;
// One commit per repository root.
for (Iterator rootUrls = reposInfo.keySet().iterator(); rootUrls.hasNext();) {
SVNURL reposRoot = (SVNURL) rootUrls.next();
SVNRepository repository = (SVNRepository)reposInfo.get(reposRoot);
ArrayList<String> paths = (ArrayList<String>)relPathInfo.get(reposRoot);
info = singleRepositoryDelete(repository, reposRoot, paths);
if (info != null) {
getOperation().receive(SvnTarget.fromURL(reposRoot), info);
}
}
return info != null ? info : SVNCommitInfo.NULL;
}
| protected SVNCommitInfo run() throws SVNException {
// Deletes the operation's target URLs directly in the repository, grouping
// targets by repository root so each root is committed exactly once.
// Returns the last commit's info, or SVNCommitInfo.NULL if nothing ran.
if (getOperation().getTargets().size() == 0) {
return SVNCommitInfo.NULL;
}
// repository root URL -> open SVNRepository session / relative paths to delete
SVNHashMap reposInfo = new SVNHashMap();
SVNHashMap relPathInfo = new SVNHashMap();
for (SvnTarget target : getOperation().getTargets()) {
SVNURL url = target.getURL();
SVNRepository repository = null;
SVNURL reposRoot = null;
String reposRelPath = null;
ArrayList<String> relPaths;
SVNNodeKind kind;
// Reuse an already-open session whose root is an ancestor of this URL.
for (Iterator rootUrls = reposInfo.keySet().iterator(); rootUrls.hasNext();) {
reposRoot = (SVNURL) rootUrls.next();
reposRelPath = SVNWCUtils.isChild(reposRoot, url);
if (reposRelPath != null) {
repository = (SVNRepository)reposInfo.get(reposRoot);
relPaths = (ArrayList<String>)relPathInfo.get(reposRoot);
relPaths.add(reposRelPath);
}
}
if (repository == null) {
// Dedicated session (mayReuse=false): its location is rebased to the
// repository root below and must not be shared with other callers.
repository = getRepositoryAccess().createRepository(url, null, false);
reposRoot = repository.getRepositoryRoot(true);
repository.setLocation(reposRoot, false);
reposInfo.put(reposRoot, repository);
reposRelPath = SVNWCUtils.isChild(reposRoot, url);
relPaths = new ArrayList<String>();
relPathInfo.put(reposRoot, relPaths);
relPaths.add(reposRelPath);
}
kind = repository.checkPath(reposRelPath, -1);
if (kind == SVNNodeKind.NONE) {
// MessageFormat-style placeholder, as expected by SVNErrorMessage.
SVNErrorMessage err = SVNErrorMessage.create(SVNErrorCode.FS_NOT_FOUND, "URL '{0}' does not exist", url);
SVNErrorManager.error(err, SVNLogType.WC);
}
}
SVNPropertiesManager.validateRevisionProperties(getOperation().getRevisionProperties());
SVNCommitInfo info = null;
// One commit per repository root.
for (Iterator rootUrls = reposInfo.keySet().iterator(); rootUrls.hasNext();) {
SVNURL reposRoot = (SVNURL) rootUrls.next();
SVNRepository repository = (SVNRepository)reposInfo.get(reposRoot);
ArrayList<String> paths = (ArrayList<String>)relPathInfo.get(reposRoot);
info = singleRepositoryDelete(repository, reposRoot, paths);
if (info != null) {
getOperation().receive(SvnTarget.fromURL(reposRoot), info);
}
}
return info != null ? info : SVNCommitInfo.NULL;
}
|
diff --git a/src/Event.java b/src/Event.java
index 5568e1a..5c359bf 100644
--- a/src/Event.java
+++ b/src/Event.java
@@ -1,46 +1,46 @@
import org.joda.time.DateTime;
import org.joda.time.Interval;
public class Event {
public String myName;
public String myLocation;
public String myDescription;
public DateTime myStartTime;
public DateTime myEndTime;
public Interval myInterval;
- public Event(String name, String location, String description, DateTime starttime, DateTime endtime) {
+ public Event(String name, String location, String description, DateTime startTime, DateTime endTime) {
myName = name;
myLocation = location;
myDescription = description;
- myStartTime=starttime;
- myEndTime=endtime;
- myInterval = new Interval(starttime,endtime);
+ myStartTime=startTime;
+ myEndTime=endTime;
+ myInterval = new Interval(startTime,endTime);
}
public String toString(){
return "Event[" + myName + ", " + myLocation + ", " + myDescription + "]\n";
}
public DateTime getStartTime(){
return myStartTime;
}
public DateTime getEndTime(){
return myEndTime;
}
public String getEventDescription(){
return myDescription;
}
public String getLocation(){
return myLocation;
}
public String getName(){
return myName;
}
public Interval getInterval(){
return myInterval;
}
}
| false | true | public Event(String name, String location, String description, DateTime startTime, DateTime endTime) {
// Initializes the event's descriptive fields and derives the time interval
// from the start/end pair. Parameter names follow Java lowerCamelCase
// (renames are positional-only, so callers are unaffected).
myName = name;
myLocation = location;
myDescription = description;
myStartTime = startTime;
myEndTime = endTime;
myInterval = new Interval(startTime, endTime);
}
| public Event(String name, String location, String description, DateTime startTime, DateTime endTime) {
// Store the descriptive fields first, then the time span and its interval.
this.myName = name;
this.myLocation = location;
this.myDescription = description;
this.myStartTime = startTime;
this.myEndTime = endTime;
this.myInterval = new Interval(startTime, endTime);
}
|
diff --git a/concurrency/src/training/concurrency/Logger.java b/concurrency/src/training/concurrency/Logger.java
index bd8a5e4..98723fb 100644
--- a/concurrency/src/training/concurrency/Logger.java
+++ b/concurrency/src/training/concurrency/Logger.java
@@ -1,23 +1,25 @@
package training.concurrency;
import java.util.LinkedList;
import java.util.Queue;
public class Logger implements Runnable {
private final Queue<String> messages = new LinkedList<String>();
public void log(String message) {
messages.add(message);
}
@Override
public void run() {
while (true) {
- if (!messages.isEmpty()) {
+ if (messages.isEmpty()) {
+ Thread.yield();
+ } else {
System.out.println(messages.remove());
}
}
}
}
| true | true | public void run() {
// Consumer loop: prints queued messages until the thread is terminated.
// NOTE(review): messages is a plain LinkedList shared with log(); access is
// unsynchronized — confirm producers expect this, or switch the field to a
// concurrent queue.
while (true) {
if (messages.isEmpty()) {
// Yield instead of hard-spinning so the idle loop does not burn a
// full core and starve the producer threads.
Thread.yield();
} else {
System.out.println(messages.remove());
}
}
}
| public void run() {
// Drain the shared queue forever, yielding the CPU whenever it is empty.
while (true) {
if (!messages.isEmpty()) {
System.out.println(messages.remove());
} else {
Thread.yield();
}
}
}
|
diff --git a/trunk/crux-dev/src/main/java/org/cruxframework/crux/core/rebind/rest/AbstractParameterHelper.java b/trunk/crux-dev/src/main/java/org/cruxframework/crux/core/rebind/rest/AbstractParameterHelper.java
index d1ef8f125..092f1b666 100644
--- a/trunk/crux-dev/src/main/java/org/cruxframework/crux/core/rebind/rest/AbstractParameterHelper.java
+++ b/trunk/crux-dev/src/main/java/org/cruxframework/crux/core/rebind/rest/AbstractParameterHelper.java
@@ -1,108 +1,108 @@
/*
* Copyright 2013 cruxframework.org.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package org.cruxframework.crux.core.rebind.rest;
import java.lang.annotation.Annotation;
import java.util.Date;
import org.cruxframework.crux.core.client.utils.StringUtils;
import org.cruxframework.crux.core.rebind.AbstractProxyCreator.SourcePrinter;
import org.cruxframework.crux.core.server.rest.core.dispatch.StringParameterInjector;
import org.cruxframework.crux.core.shared.rest.annotation.PathParam;
import org.cruxframework.crux.core.utils.JClassUtils;
import org.cruxframework.crux.core.utils.JClassUtils.PropertyInfo;
import com.google.gwt.core.ext.GeneratorContext;
import com.google.gwt.core.ext.typeinfo.JClassType;
import com.google.gwt.core.ext.typeinfo.JType;
/**
* @author Thiago da Rosa de Bustamante
*
*/
abstract class AbstractParameterHelper
{
protected JClassType stringType;
protected JClassType dateType;
public AbstractParameterHelper(GeneratorContext context)
{
stringType = context.getTypeOracle().findType(String.class.getCanonicalName());
dateType = context.getTypeOracle().findType(Date.class.getCanonicalName());
}
protected void generateMethodParamToCodeForComplexType(SourcePrinter srcWriter, String parameterStringVariable, JType parameterType,
String parameterName, String parameterExpression, String parameterCheckExpression, Annotation annotation)
{
PropertyInfo[] propertiesInfo = JClassUtils.extractBeanPropertiesInfo(parameterType.isClassOrInterface());
for (PropertyInfo propertyInfo : propertiesInfo)
{
String newParameterName = (StringUtils.isEmpty(parameterName)?propertyInfo.getName():parameterName+"."+propertyInfo.getName());
if (JClassUtils.isSimpleType(propertyInfo.getType()))
{
generateMethodParamToCodeForSimpleType(srcWriter, parameterStringVariable, propertyInfo.getType(),
newParameterName, parameterExpression+"."+propertyInfo.getReadMethod().getName()+"()",
(propertyInfo.getType().isPrimitive()!=null?
parameterCheckExpression:
parameterCheckExpression + " && " + parameterExpression+"."+propertyInfo.getReadMethod().getName()+"()!=null"), annotation);
}
else
{
generateMethodParamToCodeForComplexType(srcWriter, parameterStringVariable, propertyInfo.getType(),
newParameterName, parameterExpression+"."+propertyInfo.getReadMethod().getName()+"()",
parameterCheckExpression + " && " + parameterExpression+"."+propertyInfo.getReadMethod().getName()+"()!=null", annotation);
}
}
}
protected void generateMethodParamToCodeForSimpleType(SourcePrinter srcWriter, String parameterStringVariable, JType parameterType,
String parameterName, String parameterexpression, String parameterCheckExpression, Annotation annotation)
{
JClassType jClassType = parameterType.isClassOrInterface();
String nullParam = "\"\"";
if(annotation instanceof PathParam)
{
nullParam = "\""+StringParameterInjector.CRUX_NULL+"\"";
- parameterCheckExpression += "&&!"+parameterName+".equals(\"\")";
}
if (jClassType != null)
{
if (jClassType.isAssignableTo(stringType))
{
+ parameterCheckExpression += "&&!"+parameterexpression+".trim().equals(\"\")";
srcWriter.println(parameterStringVariable+"="+parameterStringVariable+".replace(\"{"+parameterName+"}\", URL.encodePathSegment("+
"("+parameterCheckExpression+"?"+parameterexpression + ":" + nullParam + ")));");
}
else if (jClassType.isAssignableTo(dateType))
{
srcWriter.println(parameterStringVariable+"="+parameterStringVariable+".replace(\"{"+parameterName+"}\", URL.encodePathSegment("+
"("+parameterCheckExpression+"?Long.toString("+parameterexpression+".getTime())" + ":" + nullParam + ")));");
}
else
{
srcWriter.println(parameterStringVariable+"="+parameterStringVariable+".replace(\"{"+parameterName+"}\", URL.encodePathSegment("+
"("+parameterCheckExpression+"?(\"\"+"+parameterexpression+")" + ":" + nullParam + ")));");
}
}
else
{
srcWriter.println(parameterStringVariable+"="+parameterStringVariable+".replace(\"{"+parameterName+"}\", URL.encodePathSegment("+
"("+parameterCheckExpression+"?(\"\"+"+parameterexpression+")" + ":" + nullParam + ")));");
}
}
}
| false | true | protected void generateMethodParamToCodeForSimpleType(SourcePrinter srcWriter, String parameterStringVariable, JType parameterType,
String parameterName, String parameterexpression, String parameterCheckExpression, Annotation annotation)
{
// Emits Java source that replaces the "{parameterName}" placeholder in the
// URI template with the URL-encoded runtime value of parameterexpression.
JClassType jClassType = parameterType.isClassOrInterface();
String nullParam = "\"\"";
if(annotation instanceof PathParam)
{
// Path segments cannot be empty; absent path params are replaced with a
// sentinel the server-side StringParameterInjector recognizes.
nullParam = "\""+StringParameterInjector.CRUX_NULL+"\"";
}
if (jClassType != null)
{
if (jClassType.isAssignableTo(stringType))
{
// Guard against blank strings in the *generated* code. The check must
// reference the runtime expression (parameterexpression) — the old
// "!parameterName.equals(...)" form named the template placeholder,
// which is not a variable in the generated source.
parameterCheckExpression += "&&!"+parameterexpression+".trim().equals(\"\")";
srcWriter.println(parameterStringVariable+"="+parameterStringVariable+".replace(\"{"+parameterName+"}\", URL.encodePathSegment("+
"("+parameterCheckExpression+"?"+parameterexpression + ":" + nullParam + ")));");
}
else if (jClassType.isAssignableTo(dateType))
{
// Dates are serialized as epoch milliseconds.
srcWriter.println(parameterStringVariable+"="+parameterStringVariable+".replace(\"{"+parameterName+"}\", URL.encodePathSegment("+
"("+parameterCheckExpression+"?Long.toString("+parameterexpression+".getTime())" + ":" + nullParam + ")));");
}
else
{
// Other object types are converted via string concatenation.
srcWriter.println(parameterStringVariable+"="+parameterStringVariable+".replace(\"{"+parameterName+"}\", URL.encodePathSegment("+
"("+parameterCheckExpression+"?(\"\"+"+parameterexpression+")" + ":" + nullParam + ")));");
}
}
else
{
// Primitive types: concatenate to obtain the string form.
srcWriter.println(parameterStringVariable+"="+parameterStringVariable+".replace(\"{"+parameterName+"}\", URL.encodePathSegment("+
"("+parameterCheckExpression+"?(\"\"+"+parameterexpression+")" + ":" + nullParam + ")));");
}
}
| protected void generateMethodParamToCodeForSimpleType(SourcePrinter srcWriter, String parameterStringVariable, JType parameterType,
String parameterName, String parameterexpression, String parameterCheckExpression, Annotation annotation)
{
// Emits Java source that replaces the "{parameterName}" placeholder in the
// URI template with the URL-encoded runtime value of parameterexpression.
JClassType jClassType = parameterType.isClassOrInterface();
String nullParam = "\"\"";
if(annotation instanceof PathParam)
{
// Path segments cannot be empty; absent path params are replaced with a
// sentinel the server-side StringParameterInjector recognizes.
nullParam = "\""+StringParameterInjector.CRUX_NULL+"\"";
}
if (jClassType != null)
{
if (jClassType.isAssignableTo(stringType))
{
// Also require the runtime string to be non-blank before using it.
parameterCheckExpression += "&&!"+parameterexpression+".trim().equals(\"\")";
srcWriter.println(parameterStringVariable+"="+parameterStringVariable+".replace(\"{"+parameterName+"}\", URL.encodePathSegment("+
"("+parameterCheckExpression+"?"+parameterexpression + ":" + nullParam + ")));");
}
else if (jClassType.isAssignableTo(dateType))
{
// Dates are serialized as epoch milliseconds.
srcWriter.println(parameterStringVariable+"="+parameterStringVariable+".replace(\"{"+parameterName+"}\", URL.encodePathSegment("+
"("+parameterCheckExpression+"?Long.toString("+parameterexpression+".getTime())" + ":" + nullParam + ")));");
}
else
{
// Other object types are converted via string concatenation.
srcWriter.println(parameterStringVariable+"="+parameterStringVariable+".replace(\"{"+parameterName+"}\", URL.encodePathSegment("+
"("+parameterCheckExpression+"?(\"\"+"+parameterexpression+")" + ":" + nullParam + ")));");
}
}
else
{
// Primitive types: concatenate to obtain the string form.
srcWriter.println(parameterStringVariable+"="+parameterStringVariable+".replace(\"{"+parameterName+"}\", URL.encodePathSegment("+
"("+parameterCheckExpression+"?(\"\"+"+parameterexpression+")" + ":" + nullParam + ")));");
}
}
|
diff --git a/src/net/johnpwood/android/standuptimer/ConfigureStandupTimer.java b/src/net/johnpwood/android/standuptimer/ConfigureStandupTimer.java
index b25c4e4..d26352e 100644
--- a/src/net/johnpwood/android/standuptimer/ConfigureStandupTimer.java
+++ b/src/net/johnpwood/android/standuptimer/ConfigureStandupTimer.java
@@ -1,129 +1,129 @@
package net.johnpwood.android.standuptimer;
import android.app.Activity;
import android.app.AlertDialog;
import android.app.Dialog;
import android.content.DialogInterface;
import android.content.Intent;
import android.content.SharedPreferences;
import android.os.Bundle;
import android.view.Menu;
import android.view.MenuInflater;
import android.view.MenuItem;
import android.view.View;
import android.view.View.OnClickListener;
import android.widget.ArrayAdapter;
import android.widget.Spinner;
import android.widget.TextView;
public class ConfigureStandupTimer extends Activity implements OnClickListener {
private static final String MEETING_LENGTH_POS = "meetingLengthPos";
private static final String NUMBER_OF_PARTICIPANTS = "numberOfParticipants";
private int meetingLengthPos = 0;
private int numParticipants = 0;
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.main);
initializeGUIElements();
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
super.onCreateOptionsMenu(menu);
MenuInflater inflater = getMenuInflater();
inflater.inflate(R.menu.menu, menu);
return true;
}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
switch (item.getItemId()) {
case R.id.about:
startActivity(new Intent(this, About.class));
return true;
case R.id.settings:
startActivity(new Intent(this, Prefs.class));
return true;
case R.id.quit:
finish();
return true;
}
return false;
}
public void onClick(View v) {
Intent i = new Intent(this, StandupTimer.class);
Spinner s = (Spinner) findViewById(R.id.meeting_length);
meetingLengthPos = s.getSelectedItemPosition();
i.putExtra("meetingLengthPos", meetingLengthPos);
TextView t = (TextView) findViewById(R.id.num_participants);
- int numParticipants = Integer.parseInt(t.getText().toString());
+ numParticipants = Integer.parseInt(t.getText().toString());
i.putExtra("numParticipants", numParticipants);
if (numParticipants > 1 && numParticipants <= 20) {
saveState();
startActivity(i);
} else {
showDialog(0);
}
}
private void initializeGUIElements() {
loadState();
initializeNumberOfParticipants();
initializeMeetingLengthSpinner();
initializeStartButton();
}
private void initializeNumberOfParticipants() {
TextView t = (TextView) findViewById(R.id.num_participants);
t.setText("" + numParticipants);
}
private void initializeStartButton() {
View startButton = findViewById(R.id.start_button);
startButton.setOnClickListener(this);
}
private void initializeMeetingLengthSpinner() {
Spinner s = (Spinner) findViewById(R.id.meeting_length);
ArrayAdapter<?> adapter = ArrayAdapter.createFromResource(this, R.array.meeting_lengths,
android.R.layout.simple_spinner_item);
adapter.setDropDownViewResource(android.R.layout.simple_spinner_dropdown_item);
s.setAdapter(adapter);
s.setSelection(meetingLengthPos);
}
private void saveState() {
Logger.i("Saving state. mettingLengthPos = " + meetingLengthPos + ", numParticipants = " + numParticipants);
SharedPreferences.Editor preferences = getPreferences(MODE_PRIVATE).edit();
preferences.putInt(MEETING_LENGTH_POS, meetingLengthPos);
preferences.putInt(NUMBER_OF_PARTICIPANTS, numParticipants);
preferences.commit();
}
private void loadState() {
SharedPreferences preferences = getPreferences(MODE_PRIVATE);
meetingLengthPos = preferences.getInt(MEETING_LENGTH_POS, 0);
numParticipants = preferences.getInt(NUMBER_OF_PARTICIPANTS, 2);
Logger.i("Retrieved state. mettingLengthPos = " + meetingLengthPos + ", numParticipants = " + numParticipants);
}
@Override
protected Dialog onCreateDialog(int id) {
AlertDialog.Builder builder = new AlertDialog.Builder(this);
builder.setMessage(R.string.valid_num_participants_warning)
.setCancelable(true)
.setNeutralButton(R.string.ok, new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int id) {
dismissDialog(0);
}
});
return builder.create();
}
}
| true | true | public void onClick(View v) {
// Reads the configured meeting length and participant count, persists them,
// and launches the timer activity when the count is in the supported range.
Intent i = new Intent(this, StandupTimer.class);
Spinner s = (Spinner) findViewById(R.id.meeting_length);
meetingLengthPos = s.getSelectedItemPosition();
i.putExtra("meetingLengthPos", meetingLengthPos);
TextView t = (TextView) findViewById(R.id.num_participants);
// Assign the field, not a shadowing local: saveState() persists the field,
// so a local "int numParticipants" would leave a stale value to be saved.
numParticipants = Integer.parseInt(t.getText().toString());
i.putExtra("numParticipants", numParticipants);
if (numParticipants > 1 && numParticipants <= 20) {
saveState();
startActivity(i);
} else {
showDialog(0);
}
}
| public void onClick(View v) {
// Collect the configured values, persist them, and start the timer
// activity when the participant count is within the supported range.
Spinner lengthSpinner = (Spinner) findViewById(R.id.meeting_length);
meetingLengthPos = lengthSpinner.getSelectedItemPosition();
TextView participantsField = (TextView) findViewById(R.id.num_participants);
numParticipants = Integer.parseInt(participantsField.getText().toString());
Intent timerIntent = new Intent(this, StandupTimer.class);
timerIntent.putExtra("meetingLengthPos", meetingLengthPos);
timerIntent.putExtra("numParticipants", numParticipants);
if (numParticipants > 1 && numParticipants <= 20) {
saveState();
startActivity(timerIntent);
} else {
showDialog(0);
}
}
|
diff --git a/edu/mit/wi/haploview/DPrimeDisplay.java b/edu/mit/wi/haploview/DPrimeDisplay.java
index e39adc7..929f3ae 100644
--- a/edu/mit/wi/haploview/DPrimeDisplay.java
+++ b/edu/mit/wi/haploview/DPrimeDisplay.java
@@ -1,545 +1,545 @@
package edu.mit.wi.haploview;
import java.awt.*;
import java.awt.geom.GeneralPath;
import java.awt.image.BufferedImage;
import java.awt.event.*;
import java.util.Vector;
import javax.swing.*;
import javax.swing.border.CompoundBorder;
class DPrimeDisplay extends JComponent{
private static final int H_BORDER = 15;
private static final int V_BORDER = 15;
private static final int TEXT_NUMBER_GAP = 3;
private static final int DEFAULT_BOX_SIZE = 50;
private static final int DEFAULT_BOX_RADIUS = 24;
private static final int TICK_HEIGHT = 8;
private static final int TICK_BOTTOM = 50;
private int widestMarkerName = 80; //default size
private int infoHeight = 0;
private int boxSize = DEFAULT_BOX_SIZE;
private int boxRadius = DEFAULT_BOX_RADIUS;
private int lowX, highX, lowY, highY;
private int left, top, clickXShift, clickYShift;
private Font boxFont = new Font("SansSerif", Font.PLAIN, 12);
private Font markerNumFont = new Font("SansSerif", Font.BOLD, 12);
private Font markerNameFont = new Font("Default", Font.PLAIN, 12);
private boolean markersLoaded;
private boolean printDetails = true;
private boolean noImage = true;
private Vector blocks;
private Rectangle ir = new Rectangle();
private Rectangle worldmapRect = new Rectangle(0,0,-1,-1);
private BufferedImage worldmap;
PairwiseLinkage dPrimeTable[][];
private Dimension chartSize;
DPrimeDisplay(PairwiseLinkage[][] t, boolean b, Vector v){
markersLoaded = b;
dPrimeTable = t;
blocks = v;
this.setDoubleBuffered(true);
addMouseListener(new PopMouseListener(this));
}
public void loadMarkers(){
noImage = true;
markersLoaded = true;
repaint();
}
public void paintComponent(Graphics g){
Graphics2D g2 = (Graphics2D) g;
Dimension size = getSize();
Dimension pref = getPreferredSize();
Rectangle visRect = getVisibleRect();
/*
boxSize = ((clipRect.width-2*H_BORDER)/dPrimeTable.length-1);
if (boxSize < 12){boxSize=12;}
if (boxSize < 25){
printDetails = false;
boxRadius = boxSize/2;
}else{
boxRadius = boxSize/2 - 1;
}
*/
//okay so this dumb if block is to prevent the ugly repainting
//bug when loading markers after the data are already being displayed,
//results in a little off-centering for small datasets, but not too bad.
//clickxshift and clickyshift are used later to translate from x,y coords
//to the pair of markers comparison at those coords
if (!(markersLoaded)){
g2.translate((size.width - pref.width) / 2,
(size.height - pref.height) / 2);
clickXShift = left + (size.width-pref.width)/2;
clickYShift = top + (size.height - pref.height)/2;
} else {
g2.translate((size.width - pref.width) / 2,
0);
clickXShift = left + (size.width-pref.width)/2;
clickYShift = top;
}
FontMetrics boxFontMetrics = g.getFontMetrics(boxFont);
int diamondX[] = new int[4];
int diamondY[] = new int[4];
Polygon diamond;
left = H_BORDER;
top = V_BORDER;
FontMetrics metrics;
int ascent;
g2.setColor(this.getBackground());
g2.fillRect(0,0,pref.width,pref.height);
g2.setColor(Color.BLACK);
BasicStroke thickerStroke = new BasicStroke(1);
BasicStroke thinnerStroke = new BasicStroke(0.25f);
BasicStroke fatStroke = new BasicStroke(3.0f);
if (markersLoaded) {
g2.setRenderingHint(RenderingHints.KEY_ANTIALIASING,
RenderingHints.VALUE_ANTIALIAS_ON);
//// draw the marker locations
int wide = (dPrimeTable.length-1) * boxSize;
//TODO: talk to kirby about locusview scaling gizmo
int lineLeft = wide/20;
int lineSpan = (wide/10)*9;
- long minpos = Chromosome.getFilteredMarker(0).getPosition();
- long maxpos = Chromosome.getFilteredMarker(Chromosome.getSize()-1).getPosition();
+ long minpos = Chromosome.getMarker(0).getPosition();
+ long maxpos = Chromosome.getMarker(Chromosome.getSize()-1).getPosition();
double spanpos = maxpos - minpos;
g2.setStroke(thinnerStroke);
g2.setColor(Color.white);
g2.fillRect(left + lineLeft, 5, lineSpan, TICK_HEIGHT);
g2.setColor(Color.black);
g2.drawRect(left + lineLeft, 5, lineSpan, TICK_HEIGHT);
- for (int i = 0; i < Chromosome.getSize(); i++) {
+ for (int i = 0; i < Chromosome.getFilteredSize(); i++) {
double pos = (Chromosome.getFilteredMarker(i).getPosition() - minpos) / spanpos;
int xx = (int) (left + lineLeft + lineSpan*pos);
g2.setStroke(thickerStroke);
g.drawLine(xx, 5, xx, 5 + TICK_HEIGHT);
g2.setStroke(thinnerStroke);
g.drawLine(xx, 5 + TICK_HEIGHT,
left + i*boxSize, TICK_BOTTOM);
}
top += TICK_BOTTOM;
//// draw the marker names
if (printDetails){
g.setFont(markerNameFont);
metrics = g.getFontMetrics();
ascent = metrics.getAscent();
widestMarkerName = metrics.stringWidth(Chromosome.getFilteredMarker(0).getName());
for (int x = 1; x < dPrimeTable.length; x++) {
int thiswide = metrics.stringWidth(Chromosome.getFilteredMarker(x).getName());
if (thiswide > widestMarkerName) widestMarkerName = thiswide;
}
g2.translate(left, top + widestMarkerName);
g2.rotate(-Math.PI / 2.0);
for (int x = 0; x < dPrimeTable.length; x++) {
g2.drawString(Chromosome.getFilteredMarker(x).getName(),TEXT_NUMBER_GAP, x*boxSize + ascent/3);
}
g2.rotate(Math.PI / 2.0);
g2.translate(-left, -(top + widestMarkerName));
// move everybody down
top += widestMarkerName + TEXT_NUMBER_GAP;
}
g2.setRenderingHint(RenderingHints.KEY_ANTIALIASING,
RenderingHints.VALUE_ANTIALIAS_OFF);
}
//// draw the marker numbers
if (printDetails){
g.setFont(markerNumFont);
metrics = g.getFontMetrics();
ascent = metrics.getAscent();
for (int x = 0; x < dPrimeTable.length; x++) {
String mark = String.valueOf(Chromosome.realIndex[x] + 1);
g.drawString(mark,
left + x*boxSize - metrics.stringWidth(mark)/2,
top + ascent);
}
top += boxRadius/2; // give a little space between numbers and boxes
}
//the following values are the bounds on the boxes we want to
//display given that the current window is 'visRect'
lowX = (visRect.x-clickXShift-(visRect.y +
visRect.height-clickYShift))/boxSize;
if (lowX < 0) {
lowX = 0;
}
highX = ((visRect.x + visRect.width)/boxSize)+1;
if (highX > dPrimeTable.length-1){
highX = dPrimeTable.length-1;
}
lowY = ((visRect.x-clickXShift)+(visRect.y-clickYShift))/boxSize;
if (lowY < lowX+1){
lowY = lowX+1;
}
highY = (((visRect.x-clickXShift+visRect.width) +
(visRect.y-clickYShift+visRect.height))/boxSize)+1;
if (highY > dPrimeTable.length){
highY = dPrimeTable.length;
}
// draw table column by column
for (int x = lowX; x < highX; x++) {
//always draw the fewest possible boxes
if (lowY < x+1){
lowY = x+1;
}
for (int y = lowY; y < highY; y++) {
if (dPrimeTable[x][y] == null){
continue;
}
double d = dPrimeTable[x][y].getDPrime();
//double l = dPrimeTable[x][y].getLOD();
Color boxColor = dPrimeTable[x][y].getColor();
// draw markers above
int xx = left + (x + y) * boxSize / 2;
int yy = top + (y - x) * boxSize / 2;
diamondX[0] = xx; diamondY[0] = yy - boxRadius;
diamondX[1] = xx + boxRadius; diamondY[1] = yy;
diamondX[2] = xx; diamondY[2] = yy + boxRadius;
diamondX[3] = xx - boxRadius; diamondY[3] = yy;
diamond = new Polygon(diamondX, diamondY, 4);
g.setColor(boxColor);
g.fillPolygon(diamond);
if (boxColor == Color.white) {
g2.setRenderingHint(RenderingHints.KEY_ANTIALIASING,
RenderingHints.VALUE_ANTIALIAS_ON);
g.setColor(Color.lightGray);
g2.setRenderingHint(RenderingHints.KEY_ANTIALIASING,
RenderingHints.VALUE_ANTIALIAS_OFF);
}
if(printDetails){
g.setFont(boxFont);
ascent = boxFontMetrics.getAscent();
int val = (int) (d * 100);
g.setColor((val < 50) ? Color.gray : Color.black);
if (val != 100) {
String valu = String.valueOf(val);
int widf = boxFontMetrics.stringWidth(valu);
g.drawString(valu, xx - widf/2, yy + ascent/2);
}
}
}
}
//highlight blocks
boolean even = true;
//g.setColor(new Color(153,255,153));
g.setColor(new Color(51,153,51));
g2.setStroke(fatStroke);
for (int i = 0; i < blocks.size(); i++){
int[] theBlock = (int[])blocks.elementAt(i);
int first = theBlock[0];
int last = theBlock[theBlock.length-1];
g.drawLine(left + (2*first + 1) * boxSize/2 - boxRadius,
top + boxSize/2,
left + (first + last) * boxSize/2,
top + (last - first) * boxSize/2 + boxRadius);
g.drawLine(left + (first + last) * boxSize/2,
top + (last - first) * boxSize/2 + boxRadius,
left + (2*last - 1) * boxSize/2+boxRadius,
top + boxSize/2);
for (int j = 0; j < theBlock.length; j++){
g.drawLine(left + (2*theBlock[j]+1) * boxSize/2 - boxRadius,
top + boxSize/2,
left + (2*theBlock[j]+1) * boxSize/2,
top + boxSize/2 - boxRadius);
g.drawLine (left + (2*theBlock[j]) * boxSize/2 - boxRadius,
top + boxSize/2 - boxRadius,
left + (2*theBlock[j]) * boxSize/2,
top + boxSize/2);
}
}
g2.setStroke(thickerStroke);
if (pref.getWidth() > (2*visRect.width)){
//dataset is big enough to require worldmap
if (noImage){
//first time through draw a worldmap if dataset is big:
final int WM_MAX_WIDTH = visRect.width/3;
double scalefactor;
scalefactor = (double)(chartSize.width)/WM_MAX_WIDTH;
CompoundBorder wmBorder = new CompoundBorder(BorderFactory.createRaisedBevelBorder(),
BorderFactory.createLoweredBevelBorder());
worldmap = new BufferedImage((int)(chartSize.width/scalefactor)+wmBorder.getBorderInsets(this).left*2,
(int)(chartSize.height/scalefactor)+wmBorder.getBorderInsets(this).top*2,
BufferedImage.TYPE_3BYTE_BGR);
Graphics gw = worldmap.getGraphics();
Graphics2D gw2 = (Graphics2D)(gw);
gw2.setColor(this.getBackground());
gw2.fillRect(1,1,worldmap.getWidth()-2,worldmap.getHeight()-2);
//make a pretty border
gw2.setColor(Color.BLACK);
wmBorder.paintBorder(this,gw2,0,0,worldmap.getWidth()-1,worldmap.getHeight()-1);
ir = wmBorder.getInteriorRectangle(this,0,0,worldmap.getWidth()-1, worldmap.getHeight()-1);
double prefBoxSize = boxSize/scalefactor;
float[] smallDiamondX = new float[4];
float[] smallDiamondY = new float[4];
GeneralPath gp;
for (int x = 0; x < dPrimeTable.length-1; x++){
for (int y = x+1; y < dPrimeTable.length; y++){
if (dPrimeTable[x][y] == null){
continue;
}
double xx = (x + y)*prefBoxSize/2+wmBorder.getBorderInsets(this).left;
double yy = (y - x)*prefBoxSize/2+wmBorder.getBorderInsets(this).top;
smallDiamondX[0] = (float)xx; smallDiamondY[0] = (float)(yy - prefBoxSize/2);
smallDiamondX[1] = (float)(xx + prefBoxSize/2); smallDiamondY[1] = (float)yy;
smallDiamondX[2] = (float)xx; smallDiamondY[2] = (float)(yy + prefBoxSize/2);
smallDiamondX[3] = (float)(xx - prefBoxSize/2); smallDiamondY[3] = (float)yy;
gp = new GeneralPath(GeneralPath.WIND_EVEN_ODD, smallDiamondX.length);
gp.moveTo(smallDiamondX[0],smallDiamondY[0]);
for (int i = 1; i < smallDiamondX.length; i++){
gp.lineTo(smallDiamondX[i], smallDiamondY[i]);
}
gp.closePath();
gw2.setColor(dPrimeTable[x][y].getColor());
gw2.fill(gp);
}
}
noImage = false;
}
g.drawImage(worldmap,visRect.x,
visRect.y + visRect.height - worldmap.getHeight(),
this);
worldmapRect = new Rectangle(visRect.x,
visRect.y+visRect.height-worldmap.getHeight(),
worldmap.getWidth(),
worldmap.getHeight());
//draw the outline of the viewport
g.setColor(Color.BLACK);
double hRatio = ir.getWidth()/pref.getWidth();
double vRatio = ir.getHeight()/pref.getHeight();
int hBump = worldmap.getWidth()-ir.width;
int vBump = worldmap.getHeight()-ir.height;
//bump a few pixels to avoid drawing on the border
g.drawRect((int)(visRect.x*hRatio)+hBump/2+visRect.x,
(int)(visRect.y*vRatio)+vBump/2+(visRect.y + visRect.height - worldmap.getHeight()),
(int)(visRect.width*hRatio),
(int)(visRect.height*vRatio));
}
}
public Dimension getPreferredSize() {
//loop through table to find deepest non-null comparison
int count = 0;
for (int x = 0; x < dPrimeTable.length-1; x++){
for (int y = x+1; y < dPrimeTable.length; y++){
if (dPrimeTable[x][y] != null){
if (count < y-x){
count = y-x;
}
}
}
}
//add one so we don't clip bottom box
count ++;
int high = 2*V_BORDER + count*boxSize/2;
chartSize = new Dimension(2*H_BORDER + boxSize*(dPrimeTable.length-1),high);
//this dimension is just the area taken up by the dprime chart
//it is used in drawing the worldmap
if (markersLoaded){
infoHeight = TICK_BOTTOM + widestMarkerName + TEXT_NUMBER_GAP;
high += infoHeight;
}else{
infoHeight=0;
}
return new Dimension(2*H_BORDER + boxSize*(dPrimeTable.length-1), high);
}
int[] centerString(String s, FontMetrics fm) {
int[] returnArray = new int[2];
returnArray[0] = (30-fm.stringWidth(s))/2;
returnArray[1] = 10+(30-fm.getAscent())/2;
return returnArray;
}
public void refreshBlocks(Vector v) {
//recolor the worldmap and change the blocklist
noImage=true;
blocks = v;
}
    /**
     * Mouse handler for the D' chart. Left-clicks on the worldmap thumbnail
     * jump the enclosing viewport to the corresponding spot in the full
     * chart; right-button presses over a comparison box paint a transient
     * popup with the pairwise statistics, dismissed by a repaint on release.
     */
    class PopMouseListener implements MouseListener{
        // The display whose coordinates we translate and which we repaint.
        JComponent caller;
        public PopMouseListener(DPrimeDisplay d){
            caller = d;
        }
        public void mouseClicked(MouseEvent e) {
            // Left button only: navigate via the worldmap thumbnail.
            if ((e.getModifiers() & InputEvent.BUTTON1_MASK) ==
                    InputEvent.BUTTON1_MASK) {
                int clickX = e.getX();
                int clickY = e.getY();
                if (worldmapRect.contains(clickX,clickY)){
                    //convert a click on the worldmap to a point on the big picture
                    int bigClickX = (((clickX - caller.getVisibleRect().x) * chartSize.width) /
                            worldmap.getWidth())-caller.getVisibleRect().width/2;
                    int bigClickY = (((clickY - caller.getVisibleRect().y -
                            (caller.getVisibleRect().height-worldmap.getHeight())) *
                            chartSize.height) / worldmap.getHeight()) -
                            caller.getVisibleRect().height/2 + infoHeight;
                    //if the clicks are near the edges, correct values
                    // (clamp so the viewport never scrolls out of bounds)
                    if (bigClickX > chartSize.width - caller.getVisibleRect().width){
                        bigClickX = chartSize.width - caller.getVisibleRect().width;
                    }
                    if (bigClickX < 0){
                        bigClickX = 0;
                    }
                    if (bigClickY > chartSize.height - caller.getVisibleRect().height + infoHeight){
                        bigClickY = chartSize.height - caller.getVisibleRect().height + infoHeight;
                    }
                    if (bigClickY < 0){
                        bigClickY = 0;
                    }
                    ((JViewport)caller.getParent()).setViewPosition(new Point(bigClickX,bigClickY));
                }
            }
        }
        public void mousePressed (MouseEvent e) {
            // Right button only: pop up pairwise stats for the box under
            // the cursor.
            if ((e.getModifiers() & InputEvent.BUTTON3_MASK) ==
                    InputEvent.BUTTON3_MASK){
                final int clickX = e.getX();
                final int clickY = e.getX() == e.getX() ? e.getY() : e.getY();
                // Invert the diamond layout: recover fractional (boxX, boxY)
                // marker indices from screen coordinates, then round to the
                // nearest diamond center (rounding toward zero needs the
                // +/-0.5 split below).
                double dboxX = (double)(clickX - clickXShift - (clickY-clickYShift))/boxSize;
                double dboxY = (double)(clickX - clickXShift + (clickY-clickYShift))/boxSize;
                final int boxX, boxY;
                if (dboxX < 0){
                    boxX = (int)(dboxX - 0.5);
                } else{
                    boxX = (int)(dboxX + 0.5);
                }
                if (dboxY < 0){
                    boxY = (int)(dboxY - 0.5);
                }else{
                    boxY = (int)(dboxY + 0.5);
                }
                // Only react inside the currently drawn box range, above the
                // diagonal, and not over the worldmap thumbnail.
                if ((boxX >= lowX && boxX <= highX) &&
                        (boxY > boxX && boxY < highY) &&
                        !(worldmapRect.contains(clickX,clickY))){
                    if (dPrimeTable[boxX][boxY] != null){
                        final SwingWorker worker = new SwingWorker(){
                            public Object construct(){
                                final int leftMargin = 12;
                                String[] displayStrings = new String[5];
                                // Header: marker names if loaded, else 1-based
                                // real marker indices.
                                if (markersLoaded){
                                    displayStrings[0] = new String ("(" +Chromosome.getFilteredMarker(boxX).getName() +
                                            ", " + Chromosome.getFilteredMarker(boxY).getName() + ")");
                                }else{
                                    displayStrings[0] = new String("(" + (Chromosome.realIndex[boxX]+1) + ", " +
                                            (Chromosome.realIndex[boxY]+1) + ")");
                                }
                                displayStrings[1] = new String ("D': " + dPrimeTable[boxX][boxY].getDPrime());
                                displayStrings[2] = new String ("LOD: " + dPrimeTable[boxX][boxY].getLOD());
                                displayStrings[3] = new String ("r^2: " + dPrimeTable[boxX][boxY].getRSquared());
                                displayStrings[4] = new String ("D' conf. bounds: " +
                                        dPrimeTable[boxX][boxY].getConfidenceLow() + "-" +
                                        dPrimeTable[boxX][boxY].getConfidenceHigh());
                                // Draw directly on the component; the popup
                                // vanishes on the next repaint (mouseReleased).
                                Graphics g = caller.getGraphics();
                                g.setFont(boxFont);
                                FontMetrics metrics = g.getFontMetrics();
                                int strlen = 0;
                                for (int x = 0; x < 5; x++){
                                    if (strlen < metrics.stringWidth(displayStrings[x])){
                                        strlen = metrics.stringWidth(displayStrings[x]);
                                    }
                                }
                                //edge shifts prevent window from popping up partially offscreen
                                int visRightBound = (int)(caller.getVisibleRect().getWidth() + caller.getVisibleRect().getX());
                                int visBotBound = (int)(caller.getVisibleRect().getHeight() + caller.getVisibleRect().getY());
                                int rightEdgeShift = 0;
                                if (clickX + strlen + leftMargin +5 > visRightBound){
                                    rightEdgeShift = clickX + strlen + leftMargin + 10 - visRightBound;
                                }
                                int botEdgeShift = 0;
                                if (clickY + 5*metrics.getHeight()+10 > visBotBound){
                                    botEdgeShift = clickY + 5*metrics.getHeight()+15 - visBotBound;
                                }
                                // White fill with black outline, then the five
                                // stat lines.
                                g.setColor(Color.WHITE);
                                g.fillRect(clickX+1-rightEdgeShift,
                                        clickY+1-botEdgeShift,
                                        strlen+leftMargin+4,
                                        5*metrics.getHeight()+9);
                                g.setColor(Color.BLACK);
                                g.drawRect(clickX-rightEdgeShift,
                                        clickY-botEdgeShift,
                                        strlen+leftMargin+5,
                                        5*metrics.getHeight()+10);
                                for (int x = 0; x < 5; x++){
                                    g.drawString(displayStrings[x],clickX + leftMargin - rightEdgeShift,
                                            clickY+5+((x+1)*metrics.getHeight())-botEdgeShift);
                                }
                                return "";
                            }
                        };
                        worker.start();
                    }
                }
            }
        }
        public void mouseReleased(MouseEvent e) {
            // Repaint to erase the stats popup drawn in mousePressed.
            if ((e.getModifiers() & InputEvent.BUTTON3_MASK) ==
                    InputEvent.BUTTON3_MASK){
                caller.repaint();
            }
        }
        public void mouseEntered(MouseEvent e) {
        }
        public void mouseExited(MouseEvent e) {
        }
    }
}
| false | true | public void paintComponent(Graphics g){
Graphics2D g2 = (Graphics2D) g;
Dimension size = getSize();
Dimension pref = getPreferredSize();
Rectangle visRect = getVisibleRect();
/*
boxSize = ((clipRect.width-2*H_BORDER)/dPrimeTable.length-1);
if (boxSize < 12){boxSize=12;}
if (boxSize < 25){
printDetails = false;
boxRadius = boxSize/2;
}else{
boxRadius = boxSize/2 - 1;
}
*/
//okay so this dumb if block is to prevent the ugly repainting
//bug when loading markers after the data are already being displayed,
//results in a little off-centering for small datasets, but not too bad.
//clickxshift and clickyshift are used later to translate from x,y coords
//to the pair of markers comparison at those coords
if (!(markersLoaded)){
g2.translate((size.width - pref.width) / 2,
(size.height - pref.height) / 2);
clickXShift = left + (size.width-pref.width)/2;
clickYShift = top + (size.height - pref.height)/2;
} else {
g2.translate((size.width - pref.width) / 2,
0);
clickXShift = left + (size.width-pref.width)/2;
clickYShift = top;
}
FontMetrics boxFontMetrics = g.getFontMetrics(boxFont);
int diamondX[] = new int[4];
int diamondY[] = new int[4];
Polygon diamond;
left = H_BORDER;
top = V_BORDER;
FontMetrics metrics;
int ascent;
g2.setColor(this.getBackground());
g2.fillRect(0,0,pref.width,pref.height);
g2.setColor(Color.BLACK);
BasicStroke thickerStroke = new BasicStroke(1);
BasicStroke thinnerStroke = new BasicStroke(0.25f);
BasicStroke fatStroke = new BasicStroke(3.0f);
if (markersLoaded) {
g2.setRenderingHint(RenderingHints.KEY_ANTIALIASING,
RenderingHints.VALUE_ANTIALIAS_ON);
//// draw the marker locations
int wide = (dPrimeTable.length-1) * boxSize;
//TODO: talk to kirby about locusview scaling gizmo
int lineLeft = wide/20;
int lineSpan = (wide/10)*9;
long minpos = Chromosome.getFilteredMarker(0).getPosition();
long maxpos = Chromosome.getFilteredMarker(Chromosome.getSize()-1).getPosition();
double spanpos = maxpos - minpos;
g2.setStroke(thinnerStroke);
g2.setColor(Color.white);
g2.fillRect(left + lineLeft, 5, lineSpan, TICK_HEIGHT);
g2.setColor(Color.black);
g2.drawRect(left + lineLeft, 5, lineSpan, TICK_HEIGHT);
for (int i = 0; i < Chromosome.getSize(); i++) {
double pos = (Chromosome.getFilteredMarker(i).getPosition() - minpos) / spanpos;
int xx = (int) (left + lineLeft + lineSpan*pos);
g2.setStroke(thickerStroke);
g.drawLine(xx, 5, xx, 5 + TICK_HEIGHT);
g2.setStroke(thinnerStroke);
g.drawLine(xx, 5 + TICK_HEIGHT,
left + i*boxSize, TICK_BOTTOM);
}
top += TICK_BOTTOM;
//// draw the marker names
if (printDetails){
g.setFont(markerNameFont);
metrics = g.getFontMetrics();
ascent = metrics.getAscent();
widestMarkerName = metrics.stringWidth(Chromosome.getFilteredMarker(0).getName());
for (int x = 1; x < dPrimeTable.length; x++) {
int thiswide = metrics.stringWidth(Chromosome.getFilteredMarker(x).getName());
if (thiswide > widestMarkerName) widestMarkerName = thiswide;
}
g2.translate(left, top + widestMarkerName);
g2.rotate(-Math.PI / 2.0);
for (int x = 0; x < dPrimeTable.length; x++) {
g2.drawString(Chromosome.getFilteredMarker(x).getName(),TEXT_NUMBER_GAP, x*boxSize + ascent/3);
}
g2.rotate(Math.PI / 2.0);
g2.translate(-left, -(top + widestMarkerName));
// move everybody down
top += widestMarkerName + TEXT_NUMBER_GAP;
}
g2.setRenderingHint(RenderingHints.KEY_ANTIALIASING,
RenderingHints.VALUE_ANTIALIAS_OFF);
}
//// draw the marker numbers
if (printDetails){
g.setFont(markerNumFont);
metrics = g.getFontMetrics();
ascent = metrics.getAscent();
for (int x = 0; x < dPrimeTable.length; x++) {
String mark = String.valueOf(Chromosome.realIndex[x] + 1);
g.drawString(mark,
left + x*boxSize - metrics.stringWidth(mark)/2,
top + ascent);
}
top += boxRadius/2; // give a little space between numbers and boxes
}
//the following values are the bounds on the boxes we want to
//display given that the current window is 'visRect'
lowX = (visRect.x-clickXShift-(visRect.y +
visRect.height-clickYShift))/boxSize;
if (lowX < 0) {
lowX = 0;
}
highX = ((visRect.x + visRect.width)/boxSize)+1;
if (highX > dPrimeTable.length-1){
highX = dPrimeTable.length-1;
}
lowY = ((visRect.x-clickXShift)+(visRect.y-clickYShift))/boxSize;
if (lowY < lowX+1){
lowY = lowX+1;
}
highY = (((visRect.x-clickXShift+visRect.width) +
(visRect.y-clickYShift+visRect.height))/boxSize)+1;
if (highY > dPrimeTable.length){
highY = dPrimeTable.length;
}
// draw table column by column
for (int x = lowX; x < highX; x++) {
//always draw the fewest possible boxes
if (lowY < x+1){
lowY = x+1;
}
for (int y = lowY; y < highY; y++) {
if (dPrimeTable[x][y] == null){
continue;
}
double d = dPrimeTable[x][y].getDPrime();
//double l = dPrimeTable[x][y].getLOD();
Color boxColor = dPrimeTable[x][y].getColor();
// draw markers above
int xx = left + (x + y) * boxSize / 2;
int yy = top + (y - x) * boxSize / 2;
diamondX[0] = xx; diamondY[0] = yy - boxRadius;
diamondX[1] = xx + boxRadius; diamondY[1] = yy;
diamondX[2] = xx; diamondY[2] = yy + boxRadius;
diamondX[3] = xx - boxRadius; diamondY[3] = yy;
diamond = new Polygon(diamondX, diamondY, 4);
g.setColor(boxColor);
g.fillPolygon(diamond);
if (boxColor == Color.white) {
g2.setRenderingHint(RenderingHints.KEY_ANTIALIASING,
RenderingHints.VALUE_ANTIALIAS_ON);
g.setColor(Color.lightGray);
g2.setRenderingHint(RenderingHints.KEY_ANTIALIASING,
RenderingHints.VALUE_ANTIALIAS_OFF);
}
if(printDetails){
g.setFont(boxFont);
ascent = boxFontMetrics.getAscent();
int val = (int) (d * 100);
g.setColor((val < 50) ? Color.gray : Color.black);
if (val != 100) {
String valu = String.valueOf(val);
int widf = boxFontMetrics.stringWidth(valu);
g.drawString(valu, xx - widf/2, yy + ascent/2);
}
}
}
}
//highlight blocks
boolean even = true;
//g.setColor(new Color(153,255,153));
g.setColor(new Color(51,153,51));
g2.setStroke(fatStroke);
for (int i = 0; i < blocks.size(); i++){
int[] theBlock = (int[])blocks.elementAt(i);
int first = theBlock[0];
int last = theBlock[theBlock.length-1];
g.drawLine(left + (2*first + 1) * boxSize/2 - boxRadius,
top + boxSize/2,
left + (first + last) * boxSize/2,
top + (last - first) * boxSize/2 + boxRadius);
g.drawLine(left + (first + last) * boxSize/2,
top + (last - first) * boxSize/2 + boxRadius,
left + (2*last - 1) * boxSize/2+boxRadius,
top + boxSize/2);
for (int j = 0; j < theBlock.length; j++){
g.drawLine(left + (2*theBlock[j]+1) * boxSize/2 - boxRadius,
top + boxSize/2,
left + (2*theBlock[j]+1) * boxSize/2,
top + boxSize/2 - boxRadius);
g.drawLine (left + (2*theBlock[j]) * boxSize/2 - boxRadius,
top + boxSize/2 - boxRadius,
left + (2*theBlock[j]) * boxSize/2,
top + boxSize/2);
}
}
g2.setStroke(thickerStroke);
if (pref.getWidth() > (2*visRect.width)){
//dataset is big enough to require worldmap
if (noImage){
//first time through draw a worldmap if dataset is big:
final int WM_MAX_WIDTH = visRect.width/3;
double scalefactor;
scalefactor = (double)(chartSize.width)/WM_MAX_WIDTH;
CompoundBorder wmBorder = new CompoundBorder(BorderFactory.createRaisedBevelBorder(),
BorderFactory.createLoweredBevelBorder());
worldmap = new BufferedImage((int)(chartSize.width/scalefactor)+wmBorder.getBorderInsets(this).left*2,
(int)(chartSize.height/scalefactor)+wmBorder.getBorderInsets(this).top*2,
BufferedImage.TYPE_3BYTE_BGR);
Graphics gw = worldmap.getGraphics();
Graphics2D gw2 = (Graphics2D)(gw);
gw2.setColor(this.getBackground());
gw2.fillRect(1,1,worldmap.getWidth()-2,worldmap.getHeight()-2);
//make a pretty border
gw2.setColor(Color.BLACK);
wmBorder.paintBorder(this,gw2,0,0,worldmap.getWidth()-1,worldmap.getHeight()-1);
ir = wmBorder.getInteriorRectangle(this,0,0,worldmap.getWidth()-1, worldmap.getHeight()-1);
double prefBoxSize = boxSize/scalefactor;
float[] smallDiamondX = new float[4];
float[] smallDiamondY = new float[4];
GeneralPath gp;
for (int x = 0; x < dPrimeTable.length-1; x++){
for (int y = x+1; y < dPrimeTable.length; y++){
if (dPrimeTable[x][y] == null){
continue;
}
double xx = (x + y)*prefBoxSize/2+wmBorder.getBorderInsets(this).left;
double yy = (y - x)*prefBoxSize/2+wmBorder.getBorderInsets(this).top;
smallDiamondX[0] = (float)xx; smallDiamondY[0] = (float)(yy - prefBoxSize/2);
smallDiamondX[1] = (float)(xx + prefBoxSize/2); smallDiamondY[1] = (float)yy;
smallDiamondX[2] = (float)xx; smallDiamondY[2] = (float)(yy + prefBoxSize/2);
smallDiamondX[3] = (float)(xx - prefBoxSize/2); smallDiamondY[3] = (float)yy;
gp = new GeneralPath(GeneralPath.WIND_EVEN_ODD, smallDiamondX.length);
gp.moveTo(smallDiamondX[0],smallDiamondY[0]);
for (int i = 1; i < smallDiamondX.length; i++){
gp.lineTo(smallDiamondX[i], smallDiamondY[i]);
}
gp.closePath();
gw2.setColor(dPrimeTable[x][y].getColor());
gw2.fill(gp);
}
}
noImage = false;
}
g.drawImage(worldmap,visRect.x,
visRect.y + visRect.height - worldmap.getHeight(),
this);
worldmapRect = new Rectangle(visRect.x,
visRect.y+visRect.height-worldmap.getHeight(),
worldmap.getWidth(),
worldmap.getHeight());
//draw the outline of the viewport
g.setColor(Color.BLACK);
double hRatio = ir.getWidth()/pref.getWidth();
double vRatio = ir.getHeight()/pref.getHeight();
int hBump = worldmap.getWidth()-ir.width;
int vBump = worldmap.getHeight()-ir.height;
//bump a few pixels to avoid drawing on the border
g.drawRect((int)(visRect.x*hRatio)+hBump/2+visRect.x,
(int)(visRect.y*vRatio)+vBump/2+(visRect.y + visRect.height - worldmap.getHeight()),
(int)(visRect.width*hRatio),
(int)(visRect.height*vRatio));
}
}
| public void paintComponent(Graphics g){
Graphics2D g2 = (Graphics2D) g;
Dimension size = getSize();
Dimension pref = getPreferredSize();
Rectangle visRect = getVisibleRect();
/*
boxSize = ((clipRect.width-2*H_BORDER)/dPrimeTable.length-1);
if (boxSize < 12){boxSize=12;}
if (boxSize < 25){
printDetails = false;
boxRadius = boxSize/2;
}else{
boxRadius = boxSize/2 - 1;
}
*/
//okay so this dumb if block is to prevent the ugly repainting
//bug when loading markers after the data are already being displayed,
//results in a little off-centering for small datasets, but not too bad.
//clickxshift and clickyshift are used later to translate from x,y coords
//to the pair of markers comparison at those coords
if (!(markersLoaded)){
g2.translate((size.width - pref.width) / 2,
(size.height - pref.height) / 2);
clickXShift = left + (size.width-pref.width)/2;
clickYShift = top + (size.height - pref.height)/2;
} else {
g2.translate((size.width - pref.width) / 2,
0);
clickXShift = left + (size.width-pref.width)/2;
clickYShift = top;
}
FontMetrics boxFontMetrics = g.getFontMetrics(boxFont);
int diamondX[] = new int[4];
int diamondY[] = new int[4];
Polygon diamond;
left = H_BORDER;
top = V_BORDER;
FontMetrics metrics;
int ascent;
g2.setColor(this.getBackground());
g2.fillRect(0,0,pref.width,pref.height);
g2.setColor(Color.BLACK);
BasicStroke thickerStroke = new BasicStroke(1);
BasicStroke thinnerStroke = new BasicStroke(0.25f);
BasicStroke fatStroke = new BasicStroke(3.0f);
if (markersLoaded) {
g2.setRenderingHint(RenderingHints.KEY_ANTIALIASING,
RenderingHints.VALUE_ANTIALIAS_ON);
//// draw the marker locations
int wide = (dPrimeTable.length-1) * boxSize;
//TODO: talk to kirby about locusview scaling gizmo
int lineLeft = wide/20;
int lineSpan = (wide/10)*9;
long minpos = Chromosome.getMarker(0).getPosition();
long maxpos = Chromosome.getMarker(Chromosome.getSize()-1).getPosition();
double spanpos = maxpos - minpos;
g2.setStroke(thinnerStroke);
g2.setColor(Color.white);
g2.fillRect(left + lineLeft, 5, lineSpan, TICK_HEIGHT);
g2.setColor(Color.black);
g2.drawRect(left + lineLeft, 5, lineSpan, TICK_HEIGHT);
for (int i = 0; i < Chromosome.getFilteredSize(); i++) {
double pos = (Chromosome.getFilteredMarker(i).getPosition() - minpos) / spanpos;
int xx = (int) (left + lineLeft + lineSpan*pos);
g2.setStroke(thickerStroke);
g.drawLine(xx, 5, xx, 5 + TICK_HEIGHT);
g2.setStroke(thinnerStroke);
g.drawLine(xx, 5 + TICK_HEIGHT,
left + i*boxSize, TICK_BOTTOM);
}
top += TICK_BOTTOM;
//// draw the marker names
if (printDetails){
g.setFont(markerNameFont);
metrics = g.getFontMetrics();
ascent = metrics.getAscent();
widestMarkerName = metrics.stringWidth(Chromosome.getFilteredMarker(0).getName());
for (int x = 1; x < dPrimeTable.length; x++) {
int thiswide = metrics.stringWidth(Chromosome.getFilteredMarker(x).getName());
if (thiswide > widestMarkerName) widestMarkerName = thiswide;
}
g2.translate(left, top + widestMarkerName);
g2.rotate(-Math.PI / 2.0);
for (int x = 0; x < dPrimeTable.length; x++) {
g2.drawString(Chromosome.getFilteredMarker(x).getName(),TEXT_NUMBER_GAP, x*boxSize + ascent/3);
}
g2.rotate(Math.PI / 2.0);
g2.translate(-left, -(top + widestMarkerName));
// move everybody down
top += widestMarkerName + TEXT_NUMBER_GAP;
}
g2.setRenderingHint(RenderingHints.KEY_ANTIALIASING,
RenderingHints.VALUE_ANTIALIAS_OFF);
}
//// draw the marker numbers
if (printDetails){
g.setFont(markerNumFont);
metrics = g.getFontMetrics();
ascent = metrics.getAscent();
for (int x = 0; x < dPrimeTable.length; x++) {
String mark = String.valueOf(Chromosome.realIndex[x] + 1);
g.drawString(mark,
left + x*boxSize - metrics.stringWidth(mark)/2,
top + ascent);
}
top += boxRadius/2; // give a little space between numbers and boxes
}
//the following values are the bounds on the boxes we want to
//display given that the current window is 'visRect'
lowX = (visRect.x-clickXShift-(visRect.y +
visRect.height-clickYShift))/boxSize;
if (lowX < 0) {
lowX = 0;
}
highX = ((visRect.x + visRect.width)/boxSize)+1;
if (highX > dPrimeTable.length-1){
highX = dPrimeTable.length-1;
}
lowY = ((visRect.x-clickXShift)+(visRect.y-clickYShift))/boxSize;
if (lowY < lowX+1){
lowY = lowX+1;
}
highY = (((visRect.x-clickXShift+visRect.width) +
(visRect.y-clickYShift+visRect.height))/boxSize)+1;
if (highY > dPrimeTable.length){
highY = dPrimeTable.length;
}
// draw table column by column
for (int x = lowX; x < highX; x++) {
//always draw the fewest possible boxes
if (lowY < x+1){
lowY = x+1;
}
for (int y = lowY; y < highY; y++) {
if (dPrimeTable[x][y] == null){
continue;
}
double d = dPrimeTable[x][y].getDPrime();
//double l = dPrimeTable[x][y].getLOD();
Color boxColor = dPrimeTable[x][y].getColor();
// draw markers above
int xx = left + (x + y) * boxSize / 2;
int yy = top + (y - x) * boxSize / 2;
diamondX[0] = xx; diamondY[0] = yy - boxRadius;
diamondX[1] = xx + boxRadius; diamondY[1] = yy;
diamondX[2] = xx; diamondY[2] = yy + boxRadius;
diamondX[3] = xx - boxRadius; diamondY[3] = yy;
diamond = new Polygon(diamondX, diamondY, 4);
g.setColor(boxColor);
g.fillPolygon(diamond);
if (boxColor == Color.white) {
g2.setRenderingHint(RenderingHints.KEY_ANTIALIASING,
RenderingHints.VALUE_ANTIALIAS_ON);
g.setColor(Color.lightGray);
g2.setRenderingHint(RenderingHints.KEY_ANTIALIASING,
RenderingHints.VALUE_ANTIALIAS_OFF);
}
if(printDetails){
g.setFont(boxFont);
ascent = boxFontMetrics.getAscent();
int val = (int) (d * 100);
g.setColor((val < 50) ? Color.gray : Color.black);
if (val != 100) {
String valu = String.valueOf(val);
int widf = boxFontMetrics.stringWidth(valu);
g.drawString(valu, xx - widf/2, yy + ascent/2);
}
}
}
}
//highlight blocks
boolean even = true;
//g.setColor(new Color(153,255,153));
g.setColor(new Color(51,153,51));
g2.setStroke(fatStroke);
for (int i = 0; i < blocks.size(); i++){
int[] theBlock = (int[])blocks.elementAt(i);
int first = theBlock[0];
int last = theBlock[theBlock.length-1];
g.drawLine(left + (2*first + 1) * boxSize/2 - boxRadius,
top + boxSize/2,
left + (first + last) * boxSize/2,
top + (last - first) * boxSize/2 + boxRadius);
g.drawLine(left + (first + last) * boxSize/2,
top + (last - first) * boxSize/2 + boxRadius,
left + (2*last - 1) * boxSize/2+boxRadius,
top + boxSize/2);
for (int j = 0; j < theBlock.length; j++){
g.drawLine(left + (2*theBlock[j]+1) * boxSize/2 - boxRadius,
top + boxSize/2,
left + (2*theBlock[j]+1) * boxSize/2,
top + boxSize/2 - boxRadius);
g.drawLine (left + (2*theBlock[j]) * boxSize/2 - boxRadius,
top + boxSize/2 - boxRadius,
left + (2*theBlock[j]) * boxSize/2,
top + boxSize/2);
}
}
g2.setStroke(thickerStroke);
if (pref.getWidth() > (2*visRect.width)){
//dataset is big enough to require worldmap
if (noImage){
//first time through draw a worldmap if dataset is big:
final int WM_MAX_WIDTH = visRect.width/3;
double scalefactor;
scalefactor = (double)(chartSize.width)/WM_MAX_WIDTH;
CompoundBorder wmBorder = new CompoundBorder(BorderFactory.createRaisedBevelBorder(),
BorderFactory.createLoweredBevelBorder());
worldmap = new BufferedImage((int)(chartSize.width/scalefactor)+wmBorder.getBorderInsets(this).left*2,
(int)(chartSize.height/scalefactor)+wmBorder.getBorderInsets(this).top*2,
BufferedImage.TYPE_3BYTE_BGR);
Graphics gw = worldmap.getGraphics();
Graphics2D gw2 = (Graphics2D)(gw);
gw2.setColor(this.getBackground());
gw2.fillRect(1,1,worldmap.getWidth()-2,worldmap.getHeight()-2);
//make a pretty border
gw2.setColor(Color.BLACK);
wmBorder.paintBorder(this,gw2,0,0,worldmap.getWidth()-1,worldmap.getHeight()-1);
ir = wmBorder.getInteriorRectangle(this,0,0,worldmap.getWidth()-1, worldmap.getHeight()-1);
double prefBoxSize = boxSize/scalefactor;
float[] smallDiamondX = new float[4];
float[] smallDiamondY = new float[4];
GeneralPath gp;
for (int x = 0; x < dPrimeTable.length-1; x++){
for (int y = x+1; y < dPrimeTable.length; y++){
if (dPrimeTable[x][y] == null){
continue;
}
double xx = (x + y)*prefBoxSize/2+wmBorder.getBorderInsets(this).left;
double yy = (y - x)*prefBoxSize/2+wmBorder.getBorderInsets(this).top;
smallDiamondX[0] = (float)xx; smallDiamondY[0] = (float)(yy - prefBoxSize/2);
smallDiamondX[1] = (float)(xx + prefBoxSize/2); smallDiamondY[1] = (float)yy;
smallDiamondX[2] = (float)xx; smallDiamondY[2] = (float)(yy + prefBoxSize/2);
smallDiamondX[3] = (float)(xx - prefBoxSize/2); smallDiamondY[3] = (float)yy;
gp = new GeneralPath(GeneralPath.WIND_EVEN_ODD, smallDiamondX.length);
gp.moveTo(smallDiamondX[0],smallDiamondY[0]);
for (int i = 1; i < smallDiamondX.length; i++){
gp.lineTo(smallDiamondX[i], smallDiamondY[i]);
}
gp.closePath();
gw2.setColor(dPrimeTable[x][y].getColor());
gw2.fill(gp);
}
}
noImage = false;
}
g.drawImage(worldmap,visRect.x,
visRect.y + visRect.height - worldmap.getHeight(),
this);
worldmapRect = new Rectangle(visRect.x,
visRect.y+visRect.height-worldmap.getHeight(),
worldmap.getWidth(),
worldmap.getHeight());
//draw the outline of the viewport
g.setColor(Color.BLACK);
double hRatio = ir.getWidth()/pref.getWidth();
double vRatio = ir.getHeight()/pref.getHeight();
int hBump = worldmap.getWidth()-ir.width;
int vBump = worldmap.getHeight()-ir.height;
//bump a few pixels to avoid drawing on the border
g.drawRect((int)(visRect.x*hRatio)+hBump/2+visRect.x,
(int)(visRect.y*vRatio)+vBump/2+(visRect.y + visRect.height - worldmap.getHeight()),
(int)(visRect.width*hRatio),
(int)(visRect.height*vRatio));
}
}
|
diff --git a/src/org/geworkbench/parsers/GeoSeriesMatrixParser.java b/src/org/geworkbench/parsers/GeoSeriesMatrixParser.java
index b1a74f03..929a6659 100644
--- a/src/org/geworkbench/parsers/GeoSeriesMatrixParser.java
+++ b/src/org/geworkbench/parsers/GeoSeriesMatrixParser.java
@@ -1,408 +1,407 @@
package org.geworkbench.parsers;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.InterruptedIOException;
import java.util.ArrayList;
import java.util.List;
import java.util.StringTokenizer;
import javax.swing.ProgressMonitorInputStream;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.geworkbench.bison.datastructure.biocollections.microarrays.CSExprMicroarraySet;
import org.geworkbench.bison.datastructure.biocollections.microarrays.DSMicroarraySet;
import org.geworkbench.bison.datastructure.bioobjects.markers.CSExpressionMarker;
import org.geworkbench.bison.datastructure.bioobjects.markers.DSGeneMarker;
import org.geworkbench.bison.datastructure.bioobjects.markers.annotationparser.AnnotationParser;
import org.geworkbench.bison.datastructure.bioobjects.microarray.CSExpressionMarkerValue;
import org.geworkbench.bison.datastructure.bioobjects.microarray.CSMicroarray;
import org.geworkbench.bison.datastructure.bioobjects.microarray.DSMicroarray;
/**
* @author Nikhil
* @version $Id$
*/
public class GeoSeriesMatrixParser {
static Log log = LogFactory.getLog(SOFTFileFormat.class);
private static final String commentSign1 = "#";
private static final String commentSign2 = "!";
private static final String commentSign3 = "^";
private static final String columnSeperator = "\t";
private static final String duplicateLabelModificator = "_2";
CSExprMicroarraySet maSet = new CSExprMicroarraySet();
List<String> markArrays = new ArrayList<String>();
private int possibleMarkers = 0;
transient private String errorMessage;
/*
* (non-Javadoc)
* @see org.geworkbench.components.parsers.FileFormat#checkFormat(java.io.File)
*/
public boolean checkFormat(File file) throws InterruptedIOException {
BufferedReader reader = null;
ProgressMonitorInputStream progressIn = null;
try {
FileInputStream fileIn = new FileInputStream(file);
progressIn = new ProgressMonitorInputStream(
null, "Loading data from " + file.getName(), fileIn);
reader = new BufferedReader(new InputStreamReader(
progressIn));
String line = null;
int totalColumns = 0;
List<String> markers = new ArrayList<String>();
List<String> arrays = new ArrayList<String>();
int lineIndex = 0;
int headerLineIndex = 0;
while ((line = reader.readLine()) != null) { // for each line
/*
* Adding comments that start with '!' and '#' from the GEO SOFT file to the Experiment Information tab
*/
if (line.startsWith(commentSign1) || line.startsWith(commentSign2)) {
//Ignoring the lines that has '!series_matrix_table_begin' and '!series_matrix_table_end'
if(!line.equalsIgnoreCase("!series_matrix_table_begin") && !line.equalsIgnoreCase("!series_matrix_table_end")) {
maSet.addDescription(line.substring(1));
}
}
String[] mark = line.split("\t");
if(mark[0].equals("!Sample_title")){
for (int i=1;i<mark.length;i++){
markArrays.add(mark[i]);
}
}
if ((line.indexOf(commentSign1) < 0)
&& (line.indexOf(commentSign2) != 0)
&& (line.indexOf(commentSign3) != 0)
&& (line.length() > 0)) {// we'll skip comments and
// anything before header
if (headerLineIndex == 0) {
// no header detected yet, then
// this is the header.
headerLineIndex = lineIndex;
}
int columnIndex = 0;
int accessionIndex = 0;
String[] tokens = line.split(columnSeperator);
for(String token: tokens) { // for each column
token = token.trim();
if ((headerLineIndex > 0) && (columnIndex == 0)) {
/*
* if this line is after header, then first column
* should be our marker name
*/
if (markers.contains(token)) {// duplicate markers
log.error("Duplicate Markers: "+token);
errorMessage = "Duplicate Markers: "+token;
return false;
} else {
markers.add(token);
}
} else if (headerLineIndex == lineIndex) { // header
if (token.equals("")) {
accessionIndex = columnIndex;
} else if (arrays.contains(token)) {// duplicate arrays
log.error("Duplicate Arrays labels " + token
+ " in " + file.getName());
errorMessage = "Duplicate Arrays labels "
+ token + " in " + file.getName();
return false;
} else {
arrays.add(token);
}
}
columnIndex++;
lineIndex++;
} // end of the while loop parsing one line
/* check if column match or not */
if (headerLineIndex > 0) {
/*
* if this line is real data, we assume lines after
* header are real data. (we might have bug here)
*/
if (totalColumns == 0) { /* not been set yet */
totalColumns = columnIndex - accessionIndex;
} else if (columnIndex != totalColumns){ // if not equal
errorMessage = "Columns do not match: columnIndex="+columnIndex+" totalColumns="+totalColumns+" lineIndex="+lineIndex;
return false;
}
}
} // end of if block for one line
} // end of while loop of read line
possibleMarkers = markers.size();
fileIn.close();
} catch (java.io.InterruptedIOException ie) {
if ( progressIn.getProgressMonitor().isCanceled())
{
throw ie;
}
else {
ie.printStackTrace();
}
} catch (Exception e) {
log.error("GEO SOFT check file format exception: " + e);
e.printStackTrace();
errorMessage = "GEO SOFT check file format exception: " + e;
return false;
} finally {
try {
reader.close();
} catch (IOException e) {
// no-op
e.printStackTrace();
}
}
return true;
}
/*
* (non-Javadoc)
*
* @see org.geworkbench.components.parsers.FileFormat#getMArraySet(java.io.File)
*/
public DSMicroarraySet<DSMicroarray> getMArraySet(File file)
throws InputFileFormatException, InterruptedIOException {
final int extSeperater = '.';
if (!checkFormat(file)) {
log
.info("SOFTFileFormat::getMArraySet - "
+ "Attempting to open a file that does not comply with the "
+ "GEO SOFT file format.");
throw new InputFileFormatException(errorMessage);
}
String fileName = file.getName();
int dotIndex = fileName.lastIndexOf(extSeperater);
if (dotIndex != -1) {
fileName = fileName.substring(0, dotIndex);
}
maSet.setLabel(fileName);
BufferedReader in = null;
try {
in = new BufferedReader(new FileReader(file));
if (in != null) {
String header = in.readLine();
if (header == null) {
throw new InputFileFormatException("File is empty.");
}
while (header != null
&& (header.startsWith(commentSign1) || header
.startsWith(commentSign2) || header
.startsWith(commentSign3))
|| StringUtils.isEmpty(header)) {
header = in.readLine();
}
if (header == null) {
throw new InputFileFormatException(
"File is empty or consists of only comments.\n"
+ "SOFT File Format expected");
}
header = StringUtils.replace(header, "\"", "");
StringTokenizer headerTokenizer = new StringTokenizer(header,
columnSeperator, false);
int n = headerTokenizer.countTokens();
if (n <= 1) {
throw new InputFileFormatException(
"Attempting to open a file that does not comply with the SOFT File format.\n"
+ "Invalid header: " + header);
}
n -= 1;
String line = in.readLine();
line = StringUtils.replace(line, "\"", "");
int m = 0;
/* Skip first token */
headerTokenizer.nextToken();
int duplicateLabels = 0;
for (int i = 0; i < n; i++) {
String arrayName = headerTokenizer.nextToken();
String markAnn = markArrays.get(i);
String markAnn1 = markAnn.replace("\"", "");
String arrayName1 = arrayName
+ " ("
+markAnn1
+")";
CSMicroarray array = new CSMicroarray(i, possibleMarkers,
arrayName1, null, null, false,
DSMicroarraySet.affyTxtType);
maSet.add(array);
- System.out.print("\n");
if (maSet.size() != (i + 1)) {
log.info("We got a duplicate label of array");
array.setLabel(array.getLabel()
+ duplicateLabelModificator);
maSet.add(array);
duplicateLabels++;
}
}
while ((line != null)
&& (!StringUtils.isEmpty(line))
&& (!line.trim().startsWith(commentSign2))) {
String[] tokens = line.split(columnSeperator);
int length = tokens.length;
if (length != (n + 1)) {
log.error("Warning: Could not parse line #" + (m + 1)
+ ". Line should have " + (n + 1)
+ " lines, has " + length + ".");
if ((m == 0) && (length == n + 2))
throw new InputFileFormatException(
"Attempting to open a file that does not comply with the "
+ "SOFT file format."
+ "\n"
+ "Warning: Could not parse line #"
+ (m + 1)
+ ". Line should have "
+ (n + 1)
+ " columns, but it has "
+ length
+ ".\n"
+ "This file looks like R's SOFT format, which needs manually add a tab in the beginning of the header to make it a valid SOFT format.");
else
throw new InputFileFormatException(
"Attempting to open a file that does not comply with the "
+ "SOFT format." + "\n"
+ "Warning: Could not parse line #"
+ (m + 1) + ". Line should have "
+ (n + 1) + " columns, but it has "
+ length + ".");
}
String markerName = new String(tokens[0].trim());
CSExpressionMarker marker = new CSExpressionMarker(m);
marker.setLabel(markerName);
maSet.getMarkerVector().add(m, marker);
for (int i = 0; i < n; i++) {
String valString = "";
if ((i + 1) < tokens.length) {
valString = tokens[i + 1];
}
if (valString.trim().length() == 0) {
// put values directly into CSMicroarray inside of
// maSet
Float v = Float.NaN;
CSExpressionMarkerValue markerValue = new CSExpressionMarkerValue(v);
maSet.get(i).setMarkerValue(m, markerValue);
if (v.isNaN()) {
markerValue.setMissing(true);
} else {
markerValue.setPresent();
}
} else {
float value = Float.NaN;
try {
value = Float.parseFloat(valString);
} catch (NumberFormatException nfe) {
}
// put values directly into CSMicroarray inside of
// maSet
Float v = value;
CSExpressionMarkerValue markerValue = new CSExpressionMarkerValue(
v);
try {
maSet.get(i).setMarkerValue(m, markerValue);
} catch (IndexOutOfBoundsException ioobe) {
log.error("i=" + i + ", m=" + m);
}
if (v.isNaN()) {
markerValue.setMissing(true);
} else {
markerValue.setPresent();
}
}
}
m++;
line = in.readLine();
line = StringUtils.replace(line, "\"", "");
}
// Set chip-type
String result = null;
for (int i = 0; i < m; i++) {
result = AnnotationParser.matchChipType(maSet, maSet
.getMarkerVector().get(i).getLabel(), false);
if (result != null) {
break;
}
}
if (result == null) {
AnnotationParser.matchChipType(maSet, "Unknown", true);
} else {
maSet.setCompatibilityLabel(result);
}
for (DSGeneMarker marker : maSet.getMarkerVector()) {
String token = marker.getLabel();
String[] locusResult = AnnotationParser.getInfo(token,
AnnotationParser.LOCUSLINK);
String locus = "";
if ((locusResult != null)
&& (!locusResult[0].trim().equals(""))) {
locus = locusResult[0].trim();
}
if (locus.compareTo("") != 0) {
try {
marker.setGeneId(Integer.parseInt(locus));
} catch (NumberFormatException e) {
log.info("Couldn't parse locus id: " + locus);
}
}
String[] geneNames = AnnotationParser.getInfo(token,
AnnotationParser.ABREV);
if (geneNames != null) {
marker.setGeneName(geneNames[0]);
}
marker.getUnigene().set(token);
}
}
} catch (FileNotFoundException e) {
e.printStackTrace();
return null;
} catch (IOException e) {
// TODO Auto-generated catch block
return null;
} finally {
try {
in.close();
} catch (IOException e) {
e.printStackTrace();
}
}
return maSet;
}
}
| true | true | public DSMicroarraySet<DSMicroarray> getMArraySet(File file)
throws InputFileFormatException, InterruptedIOException {
final int extSeperater = '.';
if (!checkFormat(file)) {
log
.info("SOFTFileFormat::getMArraySet - "
+ "Attempting to open a file that does not comply with the "
+ "GEO SOFT file format.");
throw new InputFileFormatException(errorMessage);
}
String fileName = file.getName();
int dotIndex = fileName.lastIndexOf(extSeperater);
if (dotIndex != -1) {
fileName = fileName.substring(0, dotIndex);
}
maSet.setLabel(fileName);
BufferedReader in = null;
try {
in = new BufferedReader(new FileReader(file));
if (in != null) {
String header = in.readLine();
if (header == null) {
throw new InputFileFormatException("File is empty.");
}
while (header != null
&& (header.startsWith(commentSign1) || header
.startsWith(commentSign2) || header
.startsWith(commentSign3))
|| StringUtils.isEmpty(header)) {
header = in.readLine();
}
if (header == null) {
throw new InputFileFormatException(
"File is empty or consists of only comments.\n"
+ "SOFT File Format expected");
}
header = StringUtils.replace(header, "\"", "");
StringTokenizer headerTokenizer = new StringTokenizer(header,
columnSeperator, false);
int n = headerTokenizer.countTokens();
if (n <= 1) {
throw new InputFileFormatException(
"Attempting to open a file that does not comply with the SOFT File format.\n"
+ "Invalid header: " + header);
}
n -= 1;
String line = in.readLine();
line = StringUtils.replace(line, "\"", "");
int m = 0;
/* Skip first token */
headerTokenizer.nextToken();
int duplicateLabels = 0;
for (int i = 0; i < n; i++) {
String arrayName = headerTokenizer.nextToken();
String markAnn = markArrays.get(i);
String markAnn1 = markAnn.replace("\"", "");
String arrayName1 = arrayName
+ " ("
+markAnn1
+")";
CSMicroarray array = new CSMicroarray(i, possibleMarkers,
arrayName1, null, null, false,
DSMicroarraySet.affyTxtType);
maSet.add(array);
System.out.print("\n");
if (maSet.size() != (i + 1)) {
log.info("We got a duplicate label of array");
array.setLabel(array.getLabel()
+ duplicateLabelModificator);
maSet.add(array);
duplicateLabels++;
}
}
while ((line != null)
&& (!StringUtils.isEmpty(line))
&& (!line.trim().startsWith(commentSign2))) {
String[] tokens = line.split(columnSeperator);
int length = tokens.length;
if (length != (n + 1)) {
log.error("Warning: Could not parse line #" + (m + 1)
+ ". Line should have " + (n + 1)
+ " lines, has " + length + ".");
if ((m == 0) && (length == n + 2))
throw new InputFileFormatException(
"Attempting to open a file that does not comply with the "
+ "SOFT file format."
+ "\n"
+ "Warning: Could not parse line #"
+ (m + 1)
+ ". Line should have "
+ (n + 1)
+ " columns, but it has "
+ length
+ ".\n"
+ "This file looks like R's SOFT format, which needs manually add a tab in the beginning of the header to make it a valid SOFT format.");
else
throw new InputFileFormatException(
"Attempting to open a file that does not comply with the "
+ "SOFT format." + "\n"
+ "Warning: Could not parse line #"
+ (m + 1) + ". Line should have "
+ (n + 1) + " columns, but it has "
+ length + ".");
}
String markerName = new String(tokens[0].trim());
CSExpressionMarker marker = new CSExpressionMarker(m);
marker.setLabel(markerName);
maSet.getMarkerVector().add(m, marker);
for (int i = 0; i < n; i++) {
String valString = "";
if ((i + 1) < tokens.length) {
valString = tokens[i + 1];
}
if (valString.trim().length() == 0) {
// put values directly into CSMicroarray inside of
// maSet
Float v = Float.NaN;
CSExpressionMarkerValue markerValue = new CSExpressionMarkerValue(v);
maSet.get(i).setMarkerValue(m, markerValue);
if (v.isNaN()) {
markerValue.setMissing(true);
} else {
markerValue.setPresent();
}
} else {
float value = Float.NaN;
try {
value = Float.parseFloat(valString);
} catch (NumberFormatException nfe) {
}
// put values directly into CSMicroarray inside of
// maSet
Float v = value;
CSExpressionMarkerValue markerValue = new CSExpressionMarkerValue(
v);
try {
maSet.get(i).setMarkerValue(m, markerValue);
} catch (IndexOutOfBoundsException ioobe) {
log.error("i=" + i + ", m=" + m);
}
if (v.isNaN()) {
markerValue.setMissing(true);
} else {
markerValue.setPresent();
}
}
}
m++;
line = in.readLine();
line = StringUtils.replace(line, "\"", "");
}
// Set chip-type
String result = null;
for (int i = 0; i < m; i++) {
result = AnnotationParser.matchChipType(maSet, maSet
.getMarkerVector().get(i).getLabel(), false);
if (result != null) {
break;
}
}
if (result == null) {
AnnotationParser.matchChipType(maSet, "Unknown", true);
} else {
maSet.setCompatibilityLabel(result);
}
for (DSGeneMarker marker : maSet.getMarkerVector()) {
String token = marker.getLabel();
String[] locusResult = AnnotationParser.getInfo(token,
AnnotationParser.LOCUSLINK);
String locus = "";
if ((locusResult != null)
&& (!locusResult[0].trim().equals(""))) {
locus = locusResult[0].trim();
}
if (locus.compareTo("") != 0) {
try {
marker.setGeneId(Integer.parseInt(locus));
} catch (NumberFormatException e) {
log.info("Couldn't parse locus id: " + locus);
}
}
String[] geneNames = AnnotationParser.getInfo(token,
AnnotationParser.ABREV);
if (geneNames != null) {
marker.setGeneName(geneNames[0]);
}
marker.getUnigene().set(token);
}
}
} catch (FileNotFoundException e) {
e.printStackTrace();
return null;
} catch (IOException e) {
// TODO Auto-generated catch block
return null;
} finally {
try {
in.close();
} catch (IOException e) {
e.printStackTrace();
}
}
return maSet;
}
| public DSMicroarraySet<DSMicroarray> getMArraySet(File file)
throws InputFileFormatException, InterruptedIOException {
final int extSeperater = '.';
if (!checkFormat(file)) {
log
.info("SOFTFileFormat::getMArraySet - "
+ "Attempting to open a file that does not comply with the "
+ "GEO SOFT file format.");
throw new InputFileFormatException(errorMessage);
}
String fileName = file.getName();
int dotIndex = fileName.lastIndexOf(extSeperater);
if (dotIndex != -1) {
fileName = fileName.substring(0, dotIndex);
}
maSet.setLabel(fileName);
BufferedReader in = null;
try {
in = new BufferedReader(new FileReader(file));
if (in != null) {
String header = in.readLine();
if (header == null) {
throw new InputFileFormatException("File is empty.");
}
while (header != null
&& (header.startsWith(commentSign1) || header
.startsWith(commentSign2) || header
.startsWith(commentSign3))
|| StringUtils.isEmpty(header)) {
header = in.readLine();
}
if (header == null) {
throw new InputFileFormatException(
"File is empty or consists of only comments.\n"
+ "SOFT File Format expected");
}
header = StringUtils.replace(header, "\"", "");
StringTokenizer headerTokenizer = new StringTokenizer(header,
columnSeperator, false);
int n = headerTokenizer.countTokens();
if (n <= 1) {
throw new InputFileFormatException(
"Attempting to open a file that does not comply with the SOFT File format.\n"
+ "Invalid header: " + header);
}
n -= 1;
String line = in.readLine();
line = StringUtils.replace(line, "\"", "");
int m = 0;
/* Skip first token */
headerTokenizer.nextToken();
int duplicateLabels = 0;
for (int i = 0; i < n; i++) {
String arrayName = headerTokenizer.nextToken();
String markAnn = markArrays.get(i);
String markAnn1 = markAnn.replace("\"", "");
String arrayName1 = arrayName
+ " ("
+markAnn1
+")";
CSMicroarray array = new CSMicroarray(i, possibleMarkers,
arrayName1, null, null, false,
DSMicroarraySet.affyTxtType);
maSet.add(array);
if (maSet.size() != (i + 1)) {
log.info("We got a duplicate label of array");
array.setLabel(array.getLabel()
+ duplicateLabelModificator);
maSet.add(array);
duplicateLabels++;
}
}
while ((line != null)
&& (!StringUtils.isEmpty(line))
&& (!line.trim().startsWith(commentSign2))) {
String[] tokens = line.split(columnSeperator);
int length = tokens.length;
if (length != (n + 1)) {
log.error("Warning: Could not parse line #" + (m + 1)
+ ". Line should have " + (n + 1)
+ " lines, has " + length + ".");
if ((m == 0) && (length == n + 2))
throw new InputFileFormatException(
"Attempting to open a file that does not comply with the "
+ "SOFT file format."
+ "\n"
+ "Warning: Could not parse line #"
+ (m + 1)
+ ". Line should have "
+ (n + 1)
+ " columns, but it has "
+ length
+ ".\n"
+ "This file looks like R's SOFT format, which needs manually add a tab in the beginning of the header to make it a valid SOFT format.");
else
throw new InputFileFormatException(
"Attempting to open a file that does not comply with the "
+ "SOFT format." + "\n"
+ "Warning: Could not parse line #"
+ (m + 1) + ". Line should have "
+ (n + 1) + " columns, but it has "
+ length + ".");
}
String markerName = new String(tokens[0].trim());
CSExpressionMarker marker = new CSExpressionMarker(m);
marker.setLabel(markerName);
maSet.getMarkerVector().add(m, marker);
for (int i = 0; i < n; i++) {
String valString = "";
if ((i + 1) < tokens.length) {
valString = tokens[i + 1];
}
if (valString.trim().length() == 0) {
// put values directly into CSMicroarray inside of
// maSet
Float v = Float.NaN;
CSExpressionMarkerValue markerValue = new CSExpressionMarkerValue(v);
maSet.get(i).setMarkerValue(m, markerValue);
if (v.isNaN()) {
markerValue.setMissing(true);
} else {
markerValue.setPresent();
}
} else {
float value = Float.NaN;
try {
value = Float.parseFloat(valString);
} catch (NumberFormatException nfe) {
}
// put values directly into CSMicroarray inside of
// maSet
Float v = value;
CSExpressionMarkerValue markerValue = new CSExpressionMarkerValue(
v);
try {
maSet.get(i).setMarkerValue(m, markerValue);
} catch (IndexOutOfBoundsException ioobe) {
log.error("i=" + i + ", m=" + m);
}
if (v.isNaN()) {
markerValue.setMissing(true);
} else {
markerValue.setPresent();
}
}
}
m++;
line = in.readLine();
line = StringUtils.replace(line, "\"", "");
}
// Set chip-type
String result = null;
for (int i = 0; i < m; i++) {
result = AnnotationParser.matchChipType(maSet, maSet
.getMarkerVector().get(i).getLabel(), false);
if (result != null) {
break;
}
}
if (result == null) {
AnnotationParser.matchChipType(maSet, "Unknown", true);
} else {
maSet.setCompatibilityLabel(result);
}
for (DSGeneMarker marker : maSet.getMarkerVector()) {
String token = marker.getLabel();
String[] locusResult = AnnotationParser.getInfo(token,
AnnotationParser.LOCUSLINK);
String locus = "";
if ((locusResult != null)
&& (!locusResult[0].trim().equals(""))) {
locus = locusResult[0].trim();
}
if (locus.compareTo("") != 0) {
try {
marker.setGeneId(Integer.parseInt(locus));
} catch (NumberFormatException e) {
log.info("Couldn't parse locus id: " + locus);
}
}
String[] geneNames = AnnotationParser.getInfo(token,
AnnotationParser.ABREV);
if (geneNames != null) {
marker.setGeneName(geneNames[0]);
}
marker.getUnigene().set(token);
}
}
} catch (FileNotFoundException e) {
e.printStackTrace();
return null;
} catch (IOException e) {
// TODO Auto-generated catch block
return null;
} finally {
try {
in.close();
} catch (IOException e) {
e.printStackTrace();
}
}
return maSet;
}
|
diff --git a/src/com.gluster.storage.management.gui/src/com/gluster/storage/management/gui/views/details/VolumeOptionsPage.java b/src/com.gluster.storage.management.gui/src/com/gluster/storage/management/gui/views/details/VolumeOptionsPage.java
index 1386b037..22d38e50 100644
--- a/src/com.gluster.storage.management.gui/src/com/gluster/storage/management/gui/views/details/VolumeOptionsPage.java
+++ b/src/com.gluster.storage.management.gui/src/com/gluster/storage/management/gui/views/details/VolumeOptionsPage.java
@@ -1,358 +1,359 @@
/*******************************************************************************
* Copyright (c) 2011 Gluster, Inc. <http://www.gluster.com>
* This file is part of Gluster Management Console.
*
* Gluster Management Console is free software; you can redistribute it and/or
* modify it under the terms of the GNU Affero General Public License as published
* by the Free Software Foundation; either version 3 of the License, or
* (at your option) any later version.
*
* Gluster Management Console is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Affero General Public License
* for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see
* <http://www.gnu.org/licenses/>.
*******************************************************************************/
package com.gluster.storage.management.gui.views.details;
import java.util.List;
import java.util.Map.Entry;
import org.apache.commons.lang.WordUtils;
import org.eclipse.jface.layout.TableColumnLayout;
import org.eclipse.jface.viewers.ArrayContentProvider;
import org.eclipse.jface.viewers.ColumnLabelProvider;
import org.eclipse.jface.viewers.ColumnLayoutData;
import org.eclipse.jface.viewers.ColumnViewerToolTipSupport;
import org.eclipse.jface.viewers.ColumnWeightData;
import org.eclipse.jface.viewers.StructuredSelection;
import org.eclipse.jface.viewers.TableViewer;
import org.eclipse.jface.viewers.TableViewerColumn;
import org.eclipse.swt.SWT;
import org.eclipse.swt.events.DisposeEvent;
import org.eclipse.swt.events.DisposeListener;
import org.eclipse.swt.events.ModifyEvent;
import org.eclipse.swt.events.ModifyListener;
import org.eclipse.swt.events.PaintEvent;
import org.eclipse.swt.events.PaintListener;
import org.eclipse.swt.events.SelectionAdapter;
import org.eclipse.swt.events.SelectionEvent;
import org.eclipse.swt.layout.FillLayout;
import org.eclipse.swt.layout.GridData;
import org.eclipse.swt.layout.GridLayout;
import org.eclipse.swt.widgets.Button;
import org.eclipse.swt.widgets.Composite;
import org.eclipse.swt.widgets.Display;
import org.eclipse.swt.widgets.Table;
import org.eclipse.swt.widgets.TableColumn;
import org.eclipse.swt.widgets.Text;
import org.eclipse.ui.forms.widgets.FormToolkit;
import com.gluster.storage.management.client.GlusterDataModelManager;
import com.gluster.storage.management.core.constants.CoreConstants;
import com.gluster.storage.management.core.model.DefaultClusterListener;
import com.gluster.storage.management.core.model.Event;
import com.gluster.storage.management.core.model.Event.EVENT_TYPE;
import com.gluster.storage.management.core.model.Volume;
import com.gluster.storage.management.core.model.VolumeOptionInfo;
import com.gluster.storage.management.gui.VolumeOptionsTableLabelProvider;
import com.gluster.storage.management.gui.utils.GUIHelper;
public class VolumeOptionsPage extends Composite {
private final FormToolkit toolkit = new FormToolkit(Display.getCurrent());
private TableViewer tableViewer;
private GUIHelper guiHelper = GUIHelper.getInstance();
private Volume volume;
private DefaultClusterListener clusterListener;
private Text filterText;
private List<VolumeOptionInfo> defaultVolumeOptions = GlusterDataModelManager.getInstance()
.getVolumeOptionsDefaults();
public enum OPTIONS_TABLE_COLUMN_INDICES {
OPTION_KEY, OPTION_VALUE
};
private static final String[] OPTIONS_TABLE_COLUMN_NAMES = new String[] { "Option Key", "Option Value" };
private Button addTopButton;
private Button addBottomButton;
private TableViewerColumn keyColumn;
private OptionKeyEditingSupport keyEditingSupport;
public VolumeOptionsPage(final Composite parent, int style, Volume volume) {
super(parent, style);
this.volume = volume;
toolkit.adapt(this);
toolkit.paintBordersFor(this);
setupPageLayout();
filterText = guiHelper.createFilterText(toolkit, this);
addTopButton = createAddButton();
setupOptionsTableViewer(filterText);
addBottomButton = createAddButton();
if (defaultVolumeOptions.size() == volume.getOptions().size()) {
setAddButtonsEnabled(false);
}
tableViewer.setInput(volume.getOptions().entrySet());
parent.layout(); // Important - this actually paints the table
registerListeners(parent);
}
private void setAddButtonsEnabled(boolean enable) {
addTopButton.setEnabled(enable);
addBottomButton.setEnabled(enable);
}
private Button createAddButton() {
Button button = toolkit.createButton(this, "&Add", SWT.FLAT);
button.addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
// add an empty option to be filled up by user
volume.setOption("", "");
tableViewer.refresh();
tableViewer.setSelection(new StructuredSelection(getEntry("")));
keyColumn.getViewer().editElement(getEntry(""), 0); // edit newly created entry
// disable the add button AND search filter textbox till user fills up the new option
setAddButtonsEnabled(false);
filterText.setEnabled(false);
}
private Entry<String, String> getEntry(String key) {
for (Entry<String, String> entry : volume.getOptions().entrySet()) {
if (entry.getKey().equals(key)) {
return entry;
}
}
return null;
}
});
// Make sure that add button is enabled only when search filter textbox is empty
filterText.addModifyListener(new ModifyListener() {
@Override
public void modifyText(ModifyEvent e) {
if (filterText.getText().length() > 0) {
setAddButtonsEnabled(false);
} else {
if (defaultVolumeOptions.size() == volume.getOptions().size()) {
setAddButtonsEnabled(false);
} else {
setAddButtonsEnabled(true);
}
}
}
});
return button;
}
private void registerListeners(final Composite parent) {
addDisposeListener(new DisposeListener() {
public void widgetDisposed(DisposeEvent e) {
if (!(addTopButton.isEnabled() || addBottomButton.isEnabled())) {
// user has selected key, but not added value. Since this is not a valid entry,
// remove the last option (without value) from the volume
volume.getOptions().remove(keyEditingSupport.getEntryBeingAdded().getKey());
}
GlusterDataModelManager.getInstance().removeClusterListener(clusterListener);
toolkit.dispose();
}
});
/**
* Ideally not required. However the table viewer is not getting laid out properly on performing
* "maximize + restore" So this is a hack to make sure that the table is laid out again on re-size of the window
*/
addPaintListener(new PaintListener() {
@Override
public void paintControl(PaintEvent e) {
parent.layout();
}
});
parent.addDisposeListener(new DisposeListener() {
@Override
public void widgetDisposed(DisposeEvent e) {
if (!(addTopButton.isEnabled() || addBottomButton.isEnabled())) {
// user has selected key, but not added value. Since this is not a valid entry,
// remove the last option (without value) from the volume
Entry<String, String> entryBeingAdded = keyEditingSupport.getEntryBeingAdded();
volume.getOptions().remove(entryBeingAdded.getKey());
}
}
});
clusterListener = new DefaultClusterListener() {
@SuppressWarnings("unchecked")
@Override
public void volumeChanged(Volume volume, Event event) {
super.volumeChanged(volume, event);
if (event.getEventType() == EVENT_TYPE.VOLUME_OPTIONS_RESET) {
if (!tableViewer.getControl().isDisposed()) {
tableViewer.refresh();
+ setAddButtonsEnabled(true);
}
}
if (event.getEventType() == EVENT_TYPE.VOLUME_OPTION_SET) {
Entry<String, String> eventEntry = (Entry<String, String>) event.getEventData();
if (isNewOption(volume, eventEntry.getKey())) {
// option has been set successfully by the user. re-enable the add button and search filter
// textbox
setAddButtonsEnabled(true);
filterText.setEnabled(true);
}
if (defaultVolumeOptions.size() == volume.getOptions().size()) {
setAddButtonsEnabled(false);
}
if (tableViewer.getTable().getItemCount() < volume.getOptions().size()) {
// new volume set from outside this page. refresh the viewer.
tableViewer.refresh();
} else {
// existing volume option value changed. update that element.
tableViewer.update(eventEntry, null);
}
}
}
private boolean isNewOption(Volume volume, String optionKey) {
if (filterText.getText().length() > 0) {
// user has been filtering the contents. adding new option is allowed only when contents are NOT
// filtered. Thus it's impossible that this is a newly added option
return false;
}
// if this is the last option in the volume options, it must be the new option
return optionKey.equals(volume.getOptions().keySet().toArray()[volume.getOptions().size() - 1]);
}
};
GlusterDataModelManager.getInstance().addClusterListener(clusterListener);
}
private void setupPageLayout() {
final GridLayout layout = new GridLayout(1, false);
layout.verticalSpacing = 10;
layout.marginTop = 10;
setLayout(layout);
}
private void setupOptionsTable(Composite parent) {
Table table = tableViewer.getTable();
table.setHeaderVisible(true);
table.setLinesVisible(false);
TableColumnLayout tableColumnLayout = createTableColumnLayout();
parent.setLayout(tableColumnLayout);
setColumnProperties(table, OPTIONS_TABLE_COLUMN_INDICES.OPTION_KEY, SWT.CENTER, 100);
setColumnProperties(table, OPTIONS_TABLE_COLUMN_INDICES.OPTION_VALUE, SWT.CENTER, 100);
}
/**
 * Creates the two table columns (key first, then value — creation order
 * defines display order) and returns a layout giving them equal weight.
 */
private TableColumnLayout createTableColumnLayout() {
    TableColumnLayout layout = new TableColumnLayout();
    ColumnLayoutData equalWeight = new ColumnWeightData(100);
    layout.setColumnData(createKeyColumn(), equalWeight);
    layout.setColumnData(createValueColumn(), equalWeight);
    return layout;
}
/**
 * Creates the "value" column of the options table. The column renders the
 * value of each option entry and is editable in place so the user can set
 * or change an option's value.
 *
 * @return the underlying SWT column
 */
private TableColumn createValueColumn() {
    TableViewerColumn column = new TableViewerColumn(tableViewer, SWT.NONE);
    column.getColumn()
            .setText(OPTIONS_TABLE_COLUMN_NAMES[OPTIONS_TABLE_COLUMN_INDICES.OPTION_VALUE.ordinal()]);
    column.setLabelProvider(new ColumnLabelProvider() {
        @SuppressWarnings("unchecked")
        @Override
        public String getText(Object element) {
            Entry<String, String> option = (Entry<String, String>) element;
            return option.getValue();
        }
    });
    // User can edit value of a volume option
    column.setEditingSupport(new OptionValueEditingSupport(column.getViewer(), volume));
    return column.getColumn();
}
/**
 * Creates the "key" column of the options table. Each cell shows the option
 * key; its tooltip shows either a hint (for the still-empty "add" row) or the
 * option's wrapped description and default value. Editing support is attached
 * so a new key can be selected when adding an option; the support object is
 * kept in {@code keyEditingSupport} for later use (e.g. on dispose).
 *
 * @return the underlying SWT column
 */
private TableColumn createKeyColumn() {
    keyColumn = new TableViewerColumn(tableViewer, SWT.NONE);
    keyColumn.getColumn().setText(OPTIONS_TABLE_COLUMN_NAMES[OPTIONS_TABLE_COLUMN_INDICES.OPTION_KEY.ordinal()]);
    keyColumn.setLabelProvider(new ColumnLabelProvider() {
        @SuppressWarnings("unchecked")
        @Override
        public String getText(Object element) {
            return ((Entry<String, String>) element).getKey();
        }

        @SuppressWarnings("unchecked")
        @Override
        public String getToolTipText(Object element) {
            String key = ((Entry<String, String>) element).getKey();
            // An empty key is the placeholder row used for adding a new option.
            if (key.isEmpty()) {
                return "Click to select a volume option key";
            }
            VolumeOptionInfo optionInfo = GlusterDataModelManager.getInstance().getVolumeOptionInfo(key);
            // Wrap the description before adding to tooltip so that long descriptions are displayed properly
            return WordUtils.wrap(optionInfo.getDescription(), 60) + CoreConstants.NEWLINE + "Default value: "
                    + optionInfo.getDefaultValue();
        }
    });
    // Editing support required when adding new key
    keyEditingSupport = new OptionKeyEditingSupport(keyColumn.getViewer(), volume);
    keyColumn.setEditingSupport(keyEditingSupport);
    return keyColumn.getColumn();
}
/**
 * Creates the table viewer that displays the volume options as a flat,
 * single-selection table, then delegates table/column configuration to
 * {@link #setupOptionsTable(Composite)}.
 *
 * @param parent the composite that will host the table
 */
private void createOptionsTableViewer(Composite parent) {
    tableViewer = new TableViewer(parent, SWT.FLAT | SWT.FULL_SELECTION | SWT.SINGLE);
    tableViewer.setLabelProvider(new VolumeOptionsTableLabelProvider());
    tableViewer.setContentProvider(new ArrayContentProvider());
    // NOTE(review): setupOptionsTable immediately sets lines NOT visible,
    // overriding this call — confirm which behavior is intended.
    tableViewer.getTable().setLinesVisible(true);
    setupOptionsTable(parent);
}
/**
 * Creates the composite that hosts the options table viewer, filling all
 * available space in the page's grid.
 *
 * @return the newly created composite
 */
private Composite createTableViewerComposite() {
    // SWT.NONE: no style bits. (Was SWT.NO, which is a message-box *button*
    // constant and is meaningless — and potentially harmful — as a widget style.)
    Composite tableViewerComposite = new Composite(this, SWT.NONE);
    tableViewerComposite.setLayout(new FillLayout(SWT.HORIZONTAL));
    tableViewerComposite.setLayoutData(new GridData(SWT.FILL, SWT.FILL, true, true));
    return tableViewerComposite;
}
/**
 * Builds the options table viewer inside its own composite, enables cell
 * tooltips, and hooks up the search filter text box.
 *
 * @param filterText text box whose contents filter the table rows
 */
private void setupOptionsTableViewer(final Text filterText) {
    Composite viewerParent = createTableViewerComposite();
    createOptionsTableViewer(viewerParent);
    ColumnViewerToolTipSupport.enableFor(tableViewer);
    // Wire a case-insensitive filter between the text box and the viewer.
    guiHelper.createFilter(tableViewer, filterText, false);
}
/**
 * Applies alignment and layout weight to one table column.
 * Assumes the table's parent already carries a {@code TableColumnLayout}
 * (installed by {@code setupOptionsTable}).
 *
 * @param table the options table
 * @param columnIndex which column to configure
 * @param alignment SWT alignment constant, e.g. {@code SWT.CENTER}
 * @param weight relative width weight for the column layout
 */
private void setColumnProperties(Table table, OPTIONS_TABLE_COLUMN_INDICES columnIndex, int alignment, int weight) {
    TableColumn column = table.getColumn(columnIndex.ordinal());
    column.setAlignment(alignment);
    ((TableColumnLayout) table.getParent().getLayout())
            .setColumnData(column, new ColumnWeightData(weight));
}
}
| true | true | private void registerListeners(final Composite parent) {
addDisposeListener(new DisposeListener() {
public void widgetDisposed(DisposeEvent e) {
if (!(addTopButton.isEnabled() || addBottomButton.isEnabled())) {
// user has selected key, but not added value. Since this is not a valid entry,
// remove the last option (without value) from the volume
volume.getOptions().remove(keyEditingSupport.getEntryBeingAdded().getKey());
}
GlusterDataModelManager.getInstance().removeClusterListener(clusterListener);
toolkit.dispose();
}
});
/**
* Ideally not required. However the table viewer is not getting laid out properly on performing
* "maximize + restore" So this is a hack to make sure that the table is laid out again on re-size of the window
*/
addPaintListener(new PaintListener() {
@Override
public void paintControl(PaintEvent e) {
parent.layout();
}
});
parent.addDisposeListener(new DisposeListener() {
@Override
public void widgetDisposed(DisposeEvent e) {
if (!(addTopButton.isEnabled() || addBottomButton.isEnabled())) {
// user has selected key, but not added value. Since this is not a valid entry,
// remove the last option (without value) from the volume
Entry<String, String> entryBeingAdded = keyEditingSupport.getEntryBeingAdded();
volume.getOptions().remove(entryBeingAdded.getKey());
}
}
});
clusterListener = new DefaultClusterListener() {
@SuppressWarnings("unchecked")
@Override
public void volumeChanged(Volume volume, Event event) {
super.volumeChanged(volume, event);
if (event.getEventType() == EVENT_TYPE.VOLUME_OPTIONS_RESET) {
if (!tableViewer.getControl().isDisposed()) {
tableViewer.refresh();
}
}
if (event.getEventType() == EVENT_TYPE.VOLUME_OPTION_SET) {
Entry<String, String> eventEntry = (Entry<String, String>) event.getEventData();
if (isNewOption(volume, eventEntry.getKey())) {
// option has been set successfully by the user. re-enable the add button and search filter
// textbox
setAddButtonsEnabled(true);
filterText.setEnabled(true);
}
if (defaultVolumeOptions.size() == volume.getOptions().size()) {
setAddButtonsEnabled(false);
}
if (tableViewer.getTable().getItemCount() < volume.getOptions().size()) {
// new volume set from outside this page. refresh the viewer.
tableViewer.refresh();
} else {
// existing volume option value changed. update that element.
tableViewer.update(eventEntry, null);
}
}
}
private boolean isNewOption(Volume volume, String optionKey) {
if (filterText.getText().length() > 0) {
// user has been filtering the contents. adding new option is allowed only when contents are NOT
// filtered. Thus it's impossible that this is a newly added option
return false;
}
// if this is the last option in the volume options, it must be the new option
return optionKey.equals(volume.getOptions().keySet().toArray()[volume.getOptions().size() - 1]);
}
};
GlusterDataModelManager.getInstance().addClusterListener(clusterListener);
}
| private void registerListeners(final Composite parent) {
addDisposeListener(new DisposeListener() {
public void widgetDisposed(DisposeEvent e) {
if (!(addTopButton.isEnabled() || addBottomButton.isEnabled())) {
// user has selected key, but not added value. Since this is not a valid entry,
// remove the last option (without value) from the volume
volume.getOptions().remove(keyEditingSupport.getEntryBeingAdded().getKey());
}
GlusterDataModelManager.getInstance().removeClusterListener(clusterListener);
toolkit.dispose();
}
});
/**
* Ideally not required. However the table viewer is not getting laid out properly on performing
* "maximize + restore" So this is a hack to make sure that the table is laid out again on re-size of the window
*/
addPaintListener(new PaintListener() {
@Override
public void paintControl(PaintEvent e) {
parent.layout();
}
});
parent.addDisposeListener(new DisposeListener() {
@Override
public void widgetDisposed(DisposeEvent e) {
if (!(addTopButton.isEnabled() || addBottomButton.isEnabled())) {
// user has selected key, but not added value. Since this is not a valid entry,
// remove the last option (without value) from the volume
Entry<String, String> entryBeingAdded = keyEditingSupport.getEntryBeingAdded();
volume.getOptions().remove(entryBeingAdded.getKey());
}
}
});
clusterListener = new DefaultClusterListener() {
@SuppressWarnings("unchecked")
@Override
public void volumeChanged(Volume volume, Event event) {
super.volumeChanged(volume, event);
if (event.getEventType() == EVENT_TYPE.VOLUME_OPTIONS_RESET) {
if (!tableViewer.getControl().isDisposed()) {
tableViewer.refresh();
setAddButtonsEnabled(true);
}
}
if (event.getEventType() == EVENT_TYPE.VOLUME_OPTION_SET) {
Entry<String, String> eventEntry = (Entry<String, String>) event.getEventData();
if (isNewOption(volume, eventEntry.getKey())) {
// option has been set successfully by the user. re-enable the add button and search filter
// textbox
setAddButtonsEnabled(true);
filterText.setEnabled(true);
}
if (defaultVolumeOptions.size() == volume.getOptions().size()) {
setAddButtonsEnabled(false);
}
if (tableViewer.getTable().getItemCount() < volume.getOptions().size()) {
// new volume set from outside this page. refresh the viewer.
tableViewer.refresh();
} else {
// existing volume option value changed. update that element.
tableViewer.update(eventEntry, null);
}
}
}
private boolean isNewOption(Volume volume, String optionKey) {
if (filterText.getText().length() > 0) {
// user has been filtering the contents. adding new option is allowed only when contents are NOT
// filtered. Thus it's impossible that this is a newly added option
return false;
}
// if this is the last option in the volume options, it must be the new option
return optionKey.equals(volume.getOptions().keySet().toArray()[volume.getOptions().size() - 1]);
}
};
GlusterDataModelManager.getInstance().addClusterListener(clusterListener);
}
|
diff --git a/chapter01/live-querries/src/test/java/drools/cookbook/chapter01/LiveQuerriesTest.java b/chapter01/live-querries/src/test/java/drools/cookbook/chapter01/LiveQuerriesTest.java
index 702146c..15febdc 100644
--- a/chapter01/live-querries/src/test/java/drools/cookbook/chapter01/LiveQuerriesTest.java
+++ b/chapter01/live-querries/src/test/java/drools/cookbook/chapter01/LiveQuerriesTest.java
@@ -1,151 +1,151 @@
package drools.cookbook.chapter01;
import static org.junit.Assert.assertEquals;
import java.util.Comparator;
import org.drools.KnowledgeBase;
import org.drools.builder.KnowledgeBuilder;
import org.drools.builder.KnowledgeBuilderError;
import org.drools.builder.KnowledgeBuilderFactory;
import org.drools.builder.ResourceType;
import org.drools.io.impl.ClassPathResource;
import org.drools.runtime.StatefulKnowledgeSession;
import org.drools.runtime.rule.FactHandle;
import org.drools.runtime.rule.LiveQuery;
import org.drools.runtime.rule.Row;
import org.junit.Test;
import ca.odell.glazedlists.SortedList;
import drools.cookbook.chapter01.listener.CustomViewChangedEventListener;
import drools.cookbook.chapter01.listener.GlazedListViewChangedEventListener;
/**
*
* @author Lucas Amador
*
*/
public class LiveQuerriesTest {
/**
 * Verifies the live query view via a custom listener: the query is opened
 * with threshold argument 20, and the listener's added/removed/updated
 * counters are checked as servers move in and out of the matching set.
 */
@Test
public void serverCpuUsageQuery() {
    StatefulKnowledgeSession session = createKnowledgeSession();

    // Three servers; with threshold 20, only one (cpu 10) matches initially.
    session.insert(new Server("winServer", 4, 4096, 2048, 25));
    Server ubuntu = new Server("ubuntuServer", 4, 2048, 1024, 70);
    FactHandle ubuntuHandle = session.insert(ubuntu);
    session.insert(new Server("debianServer", 4, 2048, 1024, 10));

    CustomViewChangedEventListener listener = new CustomViewChangedEventListener();
    LiveQuery query = session.openLiveQuery("serverCpuUsage", new Object[]{20}, listener);
    assertEquals(1, listener.getCurrentServers().size());
    assertEquals(0, listener.getRemovedServers().size());
    assertEquals(0, listener.getUpdatedServers().size());

    // Dropping ubuntu's cpu usage brings it into the live view.
    ubuntu.setCpuUsage(10);
    session.update(ubuntuHandle, ubuntu);
    assertEquals(2, listener.getCurrentServers().size());
    assertEquals(0, listener.getRemovedServers().size());
    assertEquals(0, listener.getUpdatedServers().size());

    // A further change while already in the view is reported as an update.
    ubuntu.setCpuUsage(5);
    session.update(ubuntuHandle, ubuntu);
    assertEquals(2, listener.getCurrentServers().size());
    assertEquals(0, listener.getRemovedServers().size());
    assertEquals(1, listener.getUpdatedServers().size());

    query.close();
    session.dispose();
}
@Test
public void serverCpuUsageGlazedListQuery() {
StatefulKnowledgeSession ksession = createKnowledgeSession();
Server winServer = new Server("winServer", 4, 4096, 2048, 25);
FactHandle winServerFactHandle = ksession.insert(winServer);
Server ubuntuServer = new Server("ubuntuServer", 4, 2048, 1024, 70);
FactHandle ubuntuServerFactHandle = ksession.insert(ubuntuServer);
Server debianServer = new Server("debianServer", 4, 2048, 1024, 10);
ksession.insert(debianServer);
GlazedListViewChangedEventListener listener = new GlazedListViewChangedEventListener();
LiveQuery query = ksession.openLiveQuery("serverCpuUsage", new Object[]{20}, listener);
SortedList<Row> serverSortedList = new SortedList<Row>(listener, new Comparator<Row>() {
public int compare(Row r1, Row r2) {
- Server server1 = (Server) r1.get("$serve");
+ Server server1 = (Server) r1.get("$server");
Server server2 = (Server) r2.get("$server");
return (server1.getCpuUsage() - server2.getCpuUsage());
}
});
System.out.println("#######################################################################################");
for (Row row : serverSortedList) {
System.out.println(row.get("$server"));
}
assertEquals(10, ((Server)serverSortedList.get(0).get("$server")).getCpuUsage());
ubuntuServer.setCpuUsage(13);
ksession.update(ubuntuServerFactHandle, ubuntuServer);
System.out.println("#######################################################################################");
for (Row row : serverSortedList) {
System.out.println(row.get("$server"));
}
assertEquals(10, ((Server)serverSortedList.get(0).get("$server")).getCpuUsage());
assertEquals(13, ((Server)serverSortedList.get(1).get("$server")).getCpuUsage());
ubuntuServer.setCpuUsage(5);
ksession.update(ubuntuServerFactHandle, ubuntuServer);
winServer.setCpuUsage(0);
ksession.update(winServerFactHandle, winServer);
System.out.println("#######################################################################################");
for (Row row : serverSortedList) {
System.out.println(row.get("$server"));
}
assertEquals(0, ((Server)serverSortedList.get(0).get("$server")).getCpuUsage());
assertEquals(5, ((Server)serverSortedList.get(1).get("$server")).getCpuUsage());
assertEquals(10, ((Server)serverSortedList.get(2).get("$server")).getCpuUsage());
query.close();
ksession.dispose();
}
/**
 * Builds a knowledge base from "rules.drl" (on the classpath next to this
 * class) and opens a stateful session on it.
 * <p>
 * Compilation errors are dumped to stderr; the session is still created so
 * the failure surfaces in the test that uses it.
 *
 * @return a fresh stateful session; callers must dispose() it
 */
private StatefulKnowledgeSession createKnowledgeSession() {
    KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder();
    kbuilder.add(new ClassPathResource("rules.drl", getClass()), ResourceType.DRL);
    // hasErrors() already implies a non-empty error list, so the previous
    // nested getErrors().size() > 0 check was redundant.
    if (kbuilder.hasErrors()) {
        for (KnowledgeBuilderError kerror : kbuilder.getErrors()) {
            System.err.println(kerror);
        }
    }
    KnowledgeBase kbase = kbuilder.newKnowledgeBase();
    return kbase.newStatefulKnowledgeSession();
}
}
| true | true | public void serverCpuUsageGlazedListQuery() {
StatefulKnowledgeSession ksession = createKnowledgeSession();
Server winServer = new Server("winServer", 4, 4096, 2048, 25);
FactHandle winServerFactHandle = ksession.insert(winServer);
Server ubuntuServer = new Server("ubuntuServer", 4, 2048, 1024, 70);
FactHandle ubuntuServerFactHandle = ksession.insert(ubuntuServer);
Server debianServer = new Server("debianServer", 4, 2048, 1024, 10);
ksession.insert(debianServer);
GlazedListViewChangedEventListener listener = new GlazedListViewChangedEventListener();
LiveQuery query = ksession.openLiveQuery("serverCpuUsage", new Object[]{20}, listener);
SortedList<Row> serverSortedList = new SortedList<Row>(listener, new Comparator<Row>() {
public int compare(Row r1, Row r2) {
Server server1 = (Server) r1.get("$serve");
Server server2 = (Server) r2.get("$server");
return (server1.getCpuUsage() - server2.getCpuUsage());
}
});
System.out.println("#######################################################################################");
for (Row row : serverSortedList) {
System.out.println(row.get("$server"));
}
assertEquals(10, ((Server)serverSortedList.get(0).get("$server")).getCpuUsage());
ubuntuServer.setCpuUsage(13);
ksession.update(ubuntuServerFactHandle, ubuntuServer);
System.out.println("#######################################################################################");
for (Row row : serverSortedList) {
System.out.println(row.get("$server"));
}
assertEquals(10, ((Server)serverSortedList.get(0).get("$server")).getCpuUsage());
assertEquals(13, ((Server)serverSortedList.get(1).get("$server")).getCpuUsage());
ubuntuServer.setCpuUsage(5);
ksession.update(ubuntuServerFactHandle, ubuntuServer);
winServer.setCpuUsage(0);
ksession.update(winServerFactHandle, winServer);
System.out.println("#######################################################################################");
for (Row row : serverSortedList) {
System.out.println(row.get("$server"));
}
assertEquals(0, ((Server)serverSortedList.get(0).get("$server")).getCpuUsage());
assertEquals(5, ((Server)serverSortedList.get(1).get("$server")).getCpuUsage());
assertEquals(10, ((Server)serverSortedList.get(2).get("$server")).getCpuUsage());
query.close();
ksession.dispose();
}
| public void serverCpuUsageGlazedListQuery() {
StatefulKnowledgeSession ksession = createKnowledgeSession();
Server winServer = new Server("winServer", 4, 4096, 2048, 25);
FactHandle winServerFactHandle = ksession.insert(winServer);
Server ubuntuServer = new Server("ubuntuServer", 4, 2048, 1024, 70);
FactHandle ubuntuServerFactHandle = ksession.insert(ubuntuServer);
Server debianServer = new Server("debianServer", 4, 2048, 1024, 10);
ksession.insert(debianServer);
GlazedListViewChangedEventListener listener = new GlazedListViewChangedEventListener();
LiveQuery query = ksession.openLiveQuery("serverCpuUsage", new Object[]{20}, listener);
SortedList<Row> serverSortedList = new SortedList<Row>(listener, new Comparator<Row>() {
public int compare(Row r1, Row r2) {
Server server1 = (Server) r1.get("$server");
Server server2 = (Server) r2.get("$server");
return (server1.getCpuUsage() - server2.getCpuUsage());
}
});
System.out.println("#######################################################################################");
for (Row row : serverSortedList) {
System.out.println(row.get("$server"));
}
assertEquals(10, ((Server)serverSortedList.get(0).get("$server")).getCpuUsage());
ubuntuServer.setCpuUsage(13);
ksession.update(ubuntuServerFactHandle, ubuntuServer);
System.out.println("#######################################################################################");
for (Row row : serverSortedList) {
System.out.println(row.get("$server"));
}
assertEquals(10, ((Server)serverSortedList.get(0).get("$server")).getCpuUsage());
assertEquals(13, ((Server)serverSortedList.get(1).get("$server")).getCpuUsage());
ubuntuServer.setCpuUsage(5);
ksession.update(ubuntuServerFactHandle, ubuntuServer);
winServer.setCpuUsage(0);
ksession.update(winServerFactHandle, winServer);
System.out.println("#######################################################################################");
for (Row row : serverSortedList) {
System.out.println(row.get("$server"));
}
assertEquals(0, ((Server)serverSortedList.get(0).get("$server")).getCpuUsage());
assertEquals(5, ((Server)serverSortedList.get(1).get("$server")).getCpuUsage());
assertEquals(10, ((Server)serverSortedList.get(2).get("$server")).getCpuUsage());
query.close();
ksession.dispose();
}
|
diff --git a/plugins/org.eclipse.birt.report.engine.emitter.config.postscript/src/org/eclipse/birt/report/engine/emitter/config/postscript/PostscriptEmitterDescriptor.java b/plugins/org.eclipse.birt.report.engine.emitter.config.postscript/src/org/eclipse/birt/report/engine/emitter/config/postscript/PostscriptEmitterDescriptor.java
index e0dbf16f8..dd2141616 100644
--- a/plugins/org.eclipse.birt.report.engine.emitter.config.postscript/src/org/eclipse/birt/report/engine/emitter/config/postscript/PostscriptEmitterDescriptor.java
+++ b/plugins/org.eclipse.birt.report.engine.emitter.config.postscript/src/org/eclipse/birt/report/engine/emitter/config/postscript/PostscriptEmitterDescriptor.java
@@ -1,317 +1,317 @@
/*******************************************************************************
* Copyright (c) 2008 Actuate Corporation.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* Actuate Corporation - initial API and implementation
*******************************************************************************/
package org.eclipse.birt.report.engine.emitter.config.postscript;
import java.util.Locale;
import org.eclipse.birt.report.engine.api.IPDFRenderOption;
import org.eclipse.birt.report.engine.api.IRenderOption;
import org.eclipse.birt.report.engine.api.RenderOption;
import org.eclipse.birt.report.engine.emitter.config.AbstractConfigurableOptionObserver;
import org.eclipse.birt.report.engine.emitter.config.AbstractEmitterDescriptor;
import org.eclipse.birt.report.engine.emitter.config.ConfigurableOption;
import org.eclipse.birt.report.engine.emitter.config.IConfigurableOption;
import org.eclipse.birt.report.engine.emitter.config.IConfigurableOptionObserver;
import org.eclipse.birt.report.engine.emitter.config.IOptionValue;
import org.eclipse.birt.report.engine.emitter.config.OptionValue;
import org.eclipse.birt.report.engine.emitter.config.postscript.i18n.Messages;
import org.eclipse.birt.report.engine.emitter.postscript.PostscriptRenderOption;
/**
* This class is a descriptor of postscript emitter.
*/
public class PostscriptEmitterDescriptor extends AbstractEmitterDescriptor
{
private static final String FONT_SUBSTITUTION = "FontSubstitution";
private static final String BIDI_PROCESSING = "BIDIProcessing";
private static final String TEXT_WRAPPING = "TextWrapping";
private static final String CHART_DPI = "ChartDpi";
private IConfigurableOption[] options;
private Locale locale;
/**
 * Creates the descriptor and builds the option set with the initial
 * (null) locale.
 */
public PostscriptEmitterDescriptor( )
{
	initOptions( );
}
/**
 * Sets the locale used to localize option display names and descriptions,
 * rebuilding the option set only when the locale actually changes.
 *
 * @param locale the new locale; may be <code>null</code>
 */
public void setLocale( Locale locale )
{
	// Compare by value, not identity: the previous "!=" check re-initialized
	// the options for equal-but-distinct Locale instances.
	if ( this.locale == null ? locale != null : !this.locale.equals( locale ) )
	{
		this.locale = locale;
		initOptions( );
	}
}
private void initOptions( )
{
// Initializes the option for BIDIProcessing.
ConfigurableOption bidiProcessing = new ConfigurableOption(
BIDI_PROCESSING );
bidiProcessing
.setDisplayName( getMessage( "OptionDisplayValue.BidiProcessing" ) ); //$NON-NLS-1$
bidiProcessing.setDataType( IConfigurableOption.DataType.BOOLEAN );
bidiProcessing.setDisplayType( IConfigurableOption.DisplayType.CHECKBOX );
bidiProcessing.setDefaultValue( Boolean.TRUE );
bidiProcessing.setToolTip( null );
bidiProcessing
.setDescription( getMessage( "OptionDescription.BidiProcessing" ) ); //$NON-NLS-1$
// Initializes the option for TextWrapping.
ConfigurableOption textWrapping = new ConfigurableOption( TEXT_WRAPPING );
textWrapping
.setDisplayName( getMessage( "OptionDisplayValue.TextWrapping" ) ); //$NON-NLS-1$
textWrapping.setDataType( IConfigurableOption.DataType.BOOLEAN );
textWrapping.setDisplayType( IConfigurableOption.DisplayType.CHECKBOX );
textWrapping.setDefaultValue( Boolean.TRUE );
textWrapping.setToolTip( null );
textWrapping
.setDescription( getMessage( "OptionDescription.TextWrapping" ) ); //$NON-NLS-1$
// Initializes the option for fontSubstitution.
ConfigurableOption fontSubstitution = new ConfigurableOption(
FONT_SUBSTITUTION );
fontSubstitution
.setDisplayName( getMessage( "OptionDisplayValue.FontSubstitution" ) );
fontSubstitution.setDataType( IConfigurableOption.DataType.BOOLEAN );
fontSubstitution
.setDisplayType( IConfigurableOption.DisplayType.CHECKBOX );
fontSubstitution.setDefaultValue( Boolean.TRUE );
fontSubstitution.setToolTip( null );
fontSubstitution
.setDescription( getMessage( "OptionDescription.FontSubstitution" ) ); //$NON-NLS-1$
// Initializes the option for PageOverFlow.
ConfigurableOption pageOverFlow = new ConfigurableOption(
IPDFRenderOption.PAGE_OVERFLOW );
pageOverFlow
.setDisplayName( getMessage( "OptionDisplayValue.PageOverFlow" ) ); //$NON-NLS-1$
pageOverFlow.setDataType( IConfigurableOption.DataType.INTEGER );
pageOverFlow.setDisplayType( IConfigurableOption.DisplayType.COMBO );
pageOverFlow
.setChoices( new OptionValue[]{
new OptionValue(
IPDFRenderOption.CLIP_CONTENT,
getMessage( "OptionDisplayValue.CLIP_CONTENT" ) ), //$NON-NLS-1$
new OptionValue(
IPDFRenderOption.FIT_TO_PAGE_SIZE,
getMessage( "OptionDisplayValue.FIT_TO_PAGE_SIZE" ) ), //$NON-NLS-1$
new OptionValue(
IPDFRenderOption.OUTPUT_TO_MULTIPLE_PAGES,
getMessage( "OptionDisplayValue.OUTPUT_TO_MULTIPLE_PAGES" ) ), //$NON-NLS-1$
new OptionValue(
IPDFRenderOption.ENLARGE_PAGE_SIZE,
getMessage( "OptionDisplayValue.ENLARGE_PAGE_SIZE" ) ) //$NON-NLS-1$
} );
pageOverFlow.setDefaultValue( IPDFRenderOption.CLIP_CONTENT );
pageOverFlow.setToolTip( null );
pageOverFlow
.setDescription( getMessage( "OptionDescription.PageOverFlow" ) ); //$NON-NLS-1$
// Initializes the option for copies.
ConfigurableOption copies = new ConfigurableOption(
PostscriptRenderOption.OPTION_COPIES );
copies.setDisplayName( getMessage( "OptionDisplayValue.Copies" ) ); //$NON-NLS-1$
copies.setDataType( IConfigurableOption.DataType.INTEGER );
copies.setDisplayType( IConfigurableOption.DisplayType.TEXT );
copies.setDefaultValue( 1 );
copies.setToolTip( null );
copies.setDescription( getMessage( "OptionDescription.Copies" ) ); //$NON-NLS-1$
// Initializes the option for collate.
ConfigurableOption collate = new ConfigurableOption(
PostscriptRenderOption.OPTION_COLLATE );
collate.setDisplayName( getMessage( "OptionDisplayValue.Collate" ) ); //$NON-NLS-1$
collate.setDataType( IConfigurableOption.DataType.BOOLEAN );
collate.setDisplayType( IConfigurableOption.DisplayType.CHECKBOX );
collate.setDefaultValue( Boolean.FALSE );
collate.setToolTip( null );
collate.setDescription( getMessage( "OptionDescription.Collate" ) ); //$NON-NLS-1$
// Initializes the option for duplex.
ConfigurableOption duplex = new ConfigurableOption(
PostscriptRenderOption.OPTION_DUPLEX );
duplex.setDisplayName( getMessage( "OptionDisplayValue.Duplex" ) ); //$NON-NLS-1$
duplex.setDataType( IConfigurableOption.DataType.STRING );
duplex.setDisplayType( IConfigurableOption.DisplayType.TEXT );
duplex.setDefaultValue( null );
duplex.setToolTip( null );
duplex.setDescription( getMessage( "OptionDescription.Duplex" ) ); //$NON-NLS-1$
// Initializes the option for paperSize.
ConfigurableOption paperSize = new ConfigurableOption(
PostscriptRenderOption.OPTION_PAPER_SIZE );
paperSize.setDisplayName( getMessage( "OptionDisplayValue.PaperSize" ) ); //$NON-NLS-1$
paperSize.setDataType( IConfigurableOption.DataType.STRING );
paperSize.setDisplayType( IConfigurableOption.DisplayType.TEXT );
paperSize.setDefaultValue( null );
paperSize.setToolTip( null );
paperSize.setDescription( getMessage( "OptionDescription.PaperSize" ) ); //$NON-NLS-1$
// Initializes the option for paperTray.
ConfigurableOption paperTray = new ConfigurableOption(
PostscriptRenderOption.OPTION_PAPER_TRAY );
paperTray.setDisplayName( getMessage( "OptionDisplayValue.PaperTray" ) ); //$NON-NLS-1$
paperTray.setDataType( IConfigurableOption.DataType.INTEGER );
paperTray.setDisplayType( IConfigurableOption.DisplayType.TEXT );
- paperTray.setDefaultValue( new Integer( 0 ) );
+ paperTray.setDefaultValue( null );
paperTray.setToolTip( null );
paperTray.setDescription( getMessage( "OptionDescription.PaperTray" ) ); //$NON-NLS-1$
ConfigurableOption scale = new ConfigurableOption(
PostscriptRenderOption.OPTION_SCALE );
scale.setDisplayName( getMessage( "OptionDisplayValue.Scale" ) ); //$NON-NLS-1$
scale.setDataType( IConfigurableOption.DataType.INTEGER );
scale.setDisplayType( IConfigurableOption.DisplayType.TEXT );
scale.setDefaultValue( new Integer( 100 ) );
scale.setToolTip( null );
scale.setDescription( getMessage( "OptionDescription.Scale" ) ); //$NON-NLS-1$
ConfigurableOption resolution = new ConfigurableOption(
PostscriptRenderOption.OPTION_RESOLUTION );
resolution
.setDisplayName( getMessage( "OptionDisplayValue.Resolution" ) ); //$NON-NLS-1$
resolution.setDataType( IConfigurableOption.DataType.STRING );
resolution.setDisplayType( IConfigurableOption.DisplayType.TEXT );
- resolution.setDefaultValue( "600 X 600" );
+ resolution.setDefaultValue( null );
resolution.setToolTip( null );
resolution
.setDescription( getMessage( "OptionDescription.Resolution" ) ); //$NON-NLS-1$
ConfigurableOption color = new ConfigurableOption(
PostscriptRenderOption.OPTION_COLOR );
color.setDisplayName( getMessage( "OptionDisplayValue.Color" ) ); //$NON-NLS-1$
color.setDataType( IConfigurableOption.DataType.BOOLEAN );
color.setDisplayType( IConfigurableOption.DisplayType.CHECKBOX );
color.setDefaultValue( Boolean.TRUE );
color.setToolTip( null );
color.setDescription( getMessage( "OptionDescription.Color" ) ); //$NON-NLS-1$
// Initializes the option for chart DPI.
ConfigurableOption chartDpi = new ConfigurableOption( CHART_DPI );
chartDpi.setDisplayName( getMessage( "OptionDisplayValue.ChartDpi" ) ); //$NON-NLS-1$
chartDpi.setDataType( IConfigurableOption.DataType.INTEGER );
chartDpi
.setDisplayType( IConfigurableOption.DisplayType.TEXT );
chartDpi.setDefaultValue( new Integer( 192 ) );
chartDpi
.setToolTip( "The DPI which chart engine uses to generate charts. For example, 192." );
chartDpi.setDescription( getMessage( "OptionDescription.ChartDpi" ) ); //$NON-NLS-1$
options = new IConfigurableOption[]{bidiProcessing, textWrapping,
fontSubstitution, pageOverFlow, copies, collate, duplex,
paperSize, paperTray, scale, resolution, color, chartDpi};
}
/**
 * Looks up a localized message for the current locale.
 *
 * @param key the message key in the emitter's resource bundle
 * @return the localized message text
 */
private String getMessage( String key )
{
	return Messages.getString( key, locale );
}
/**
 * Creates a new observer that exposes this emitter's configurable options
 * and converts the user's selections into render options.
 */
@Override
public IConfigurableOptionObserver createOptionObserver( )
{
	return new PostscriptOptionObserver( );
}
/**
 * Returns the localized, human-readable description of this emitter.
 *
 * @see org.eclipse.birt.report.engine.emitter.config.IEmitterDescriptor#getDescription()
 */
public String getDescription( )
{
	return getMessage( "PostscriptEmitter.Description" ); //$NON-NLS-1$
}
/**
 * Returns the localized display name of this emitter.
 *
 * @see org.eclipse.birt.report.engine.emitter.config.IEmitterDescriptor#getDisplayName()
 */
public String getDisplayName( )
{
	return getMessage( "PostscriptEmitter.DisplayName" ); //$NON-NLS-1$
}
/**
 * Returns the unique identifier of the postscript emitter that this
 * descriptor configures.
 *
 * @see org.eclipse.birt.report.engine.emitter.config.IEmitterDescriptor#getID()
 */
public String getID( )
{
	return "org.eclipse.birt.report.engine.emitter.postscript"; //$NON-NLS-1$
}
/**
 * Maps a configurable-option name onto the render-option key understood by
 * the engine. The four emitter-local option names are translated; any other
 * name is already a valid render-option key and passes through unchanged.
 *
 * @param name the configurable-option name; must not be null
 * @return the corresponding render-option name
 */
public String getRenderOptionName( String name )
{
	assert name != null;
	String renderOptionName;
	if ( TEXT_WRAPPING.equals( name ) )
	{
		renderOptionName = IPDFRenderOption.PDF_TEXT_WRAPPING;
	}
	else if ( BIDI_PROCESSING.equals( name ) )
	{
		renderOptionName = IPDFRenderOption.PDF_BIDI_PROCESSING;
	}
	else if ( FONT_SUBSTITUTION.equals( name ) )
	{
		renderOptionName = IPDFRenderOption.PDF_FONT_SUBSTITUTION;
	}
	else if ( CHART_DPI.equals( name ) )
	{
		renderOptionName = IRenderOption.CHART_DPI;
	}
	else
	{
		renderOptionName = name;
	}
	return renderOptionName;
}
/**
 * Observer that publishes the descriptor's option set and translates the
 * user's current selections into a postscript render option bundle.
 */
class PostscriptOptionObserver extends AbstractConfigurableOptionObserver
{

	@Override
	public IConfigurableOption[] getOptions( )
	{
		return options;
	}

	@Override
	public IRenderOption getPreferredRenderOption( )
	{
		RenderOption renderOption = new RenderOption( );
		renderOption.setEmitterID( getID( ) );
		renderOption.setOutputFormat( "postscript" ); //$NON-NLS-1$
		// 'values' (inherited) holds the user's current selections; the array
		// itself and individual entries may be null.
		if ( values != null )
		{
			for ( IOptionValue value : values )
			{
				if ( value == null )
				{
					continue;
				}
				renderOption.setOption( getRenderOptionName( value.getName( ) ),
						value.getValue( ) );
			}
		}
		return renderOption;
	}
}
}
/**
 * Builds the descriptors for every option the postscript emitter exposes
 * (BIDI processing, text wrapping, font substitution, page overflow, copies,
 * collate, duplex, paper size/tray, scale, resolution, color and chart DPI)
 * and stores them in {@code options}.
 *
 * Fix: paper tray and resolution previously defaulted to hard-coded values
 * (tray 0, "600 X 600"); they now default to {@code null} so the printer's
 * own defaults apply, matching the other unset STRING/INTEGER options.
 */
private void initOptions( )
{
	// Initializes the option for BIDIProcessing.
	ConfigurableOption bidiProcessing = new ConfigurableOption(
			BIDI_PROCESSING );
	bidiProcessing
			.setDisplayName( getMessage( "OptionDisplayValue.BidiProcessing" ) ); //$NON-NLS-1$
	bidiProcessing.setDataType( IConfigurableOption.DataType.BOOLEAN );
	bidiProcessing.setDisplayType( IConfigurableOption.DisplayType.CHECKBOX );
	bidiProcessing.setDefaultValue( Boolean.TRUE );
	bidiProcessing.setToolTip( null );
	bidiProcessing
			.setDescription( getMessage( "OptionDescription.BidiProcessing" ) ); //$NON-NLS-1$
	// Initializes the option for TextWrapping.
	ConfigurableOption textWrapping = new ConfigurableOption( TEXT_WRAPPING );
	textWrapping
			.setDisplayName( getMessage( "OptionDisplayValue.TextWrapping" ) ); //$NON-NLS-1$
	textWrapping.setDataType( IConfigurableOption.DataType.BOOLEAN );
	textWrapping.setDisplayType( IConfigurableOption.DisplayType.CHECKBOX );
	textWrapping.setDefaultValue( Boolean.TRUE );
	textWrapping.setToolTip( null );
	textWrapping
			.setDescription( getMessage( "OptionDescription.TextWrapping" ) ); //$NON-NLS-1$
	// Initializes the option for fontSubstitution.
	ConfigurableOption fontSubstitution = new ConfigurableOption(
			FONT_SUBSTITUTION );
	fontSubstitution
			.setDisplayName( getMessage( "OptionDisplayValue.FontSubstitution" ) ); //$NON-NLS-1$
	fontSubstitution.setDataType( IConfigurableOption.DataType.BOOLEAN );
	fontSubstitution
			.setDisplayType( IConfigurableOption.DisplayType.CHECKBOX );
	fontSubstitution.setDefaultValue( Boolean.TRUE );
	fontSubstitution.setToolTip( null );
	fontSubstitution
			.setDescription( getMessage( "OptionDescription.FontSubstitution" ) ); //$NON-NLS-1$
	// Initializes the option for PageOverFlow.
	ConfigurableOption pageOverFlow = new ConfigurableOption(
			IPDFRenderOption.PAGE_OVERFLOW );
	pageOverFlow
			.setDisplayName( getMessage( "OptionDisplayValue.PageOverFlow" ) ); //$NON-NLS-1$
	pageOverFlow.setDataType( IConfigurableOption.DataType.INTEGER );
	pageOverFlow.setDisplayType( IConfigurableOption.DisplayType.COMBO );
	pageOverFlow
			.setChoices( new OptionValue[]{
					new OptionValue(
							IPDFRenderOption.CLIP_CONTENT,
							getMessage( "OptionDisplayValue.CLIP_CONTENT" ) ), //$NON-NLS-1$
					new OptionValue(
							IPDFRenderOption.FIT_TO_PAGE_SIZE,
							getMessage( "OptionDisplayValue.FIT_TO_PAGE_SIZE" ) ), //$NON-NLS-1$
					new OptionValue(
							IPDFRenderOption.OUTPUT_TO_MULTIPLE_PAGES,
							getMessage( "OptionDisplayValue.OUTPUT_TO_MULTIPLE_PAGES" ) ), //$NON-NLS-1$
					new OptionValue(
							IPDFRenderOption.ENLARGE_PAGE_SIZE,
							getMessage( "OptionDisplayValue.ENLARGE_PAGE_SIZE" ) ) //$NON-NLS-1$
			} );
	pageOverFlow.setDefaultValue( IPDFRenderOption.CLIP_CONTENT );
	pageOverFlow.setToolTip( null );
	pageOverFlow
			.setDescription( getMessage( "OptionDescription.PageOverFlow" ) ); //$NON-NLS-1$
	// Initializes the option for copies.
	ConfigurableOption copies = new ConfigurableOption(
			PostscriptRenderOption.OPTION_COPIES );
	copies.setDisplayName( getMessage( "OptionDisplayValue.Copies" ) ); //$NON-NLS-1$
	copies.setDataType( IConfigurableOption.DataType.INTEGER );
	copies.setDisplayType( IConfigurableOption.DisplayType.TEXT );
	copies.setDefaultValue( 1 );
	copies.setToolTip( null );
	copies.setDescription( getMessage( "OptionDescription.Copies" ) ); //$NON-NLS-1$
	// Initializes the option for collate.
	ConfigurableOption collate = new ConfigurableOption(
			PostscriptRenderOption.OPTION_COLLATE );
	collate.setDisplayName( getMessage( "OptionDisplayValue.Collate" ) ); //$NON-NLS-1$
	collate.setDataType( IConfigurableOption.DataType.BOOLEAN );
	collate.setDisplayType( IConfigurableOption.DisplayType.CHECKBOX );
	collate.setDefaultValue( Boolean.FALSE );
	collate.setToolTip( null );
	collate.setDescription( getMessage( "OptionDescription.Collate" ) ); //$NON-NLS-1$
	// Initializes the option for duplex.
	ConfigurableOption duplex = new ConfigurableOption(
			PostscriptRenderOption.OPTION_DUPLEX );
	duplex.setDisplayName( getMessage( "OptionDisplayValue.Duplex" ) ); //$NON-NLS-1$
	duplex.setDataType( IConfigurableOption.DataType.STRING );
	duplex.setDisplayType( IConfigurableOption.DisplayType.TEXT );
	duplex.setDefaultValue( null );
	duplex.setToolTip( null );
	duplex.setDescription( getMessage( "OptionDescription.Duplex" ) ); //$NON-NLS-1$
	// Initializes the option for paperSize.
	ConfigurableOption paperSize = new ConfigurableOption(
			PostscriptRenderOption.OPTION_PAPER_SIZE );
	paperSize.setDisplayName( getMessage( "OptionDisplayValue.PaperSize" ) ); //$NON-NLS-1$
	paperSize.setDataType( IConfigurableOption.DataType.STRING );
	paperSize.setDisplayType( IConfigurableOption.DisplayType.TEXT );
	paperSize.setDefaultValue( null );
	paperSize.setToolTip( null );
	paperSize.setDescription( getMessage( "OptionDescription.PaperSize" ) ); //$NON-NLS-1$
	// Initializes the option for paperTray.
	ConfigurableOption paperTray = new ConfigurableOption(
			PostscriptRenderOption.OPTION_PAPER_TRAY );
	paperTray.setDisplayName( getMessage( "OptionDisplayValue.PaperTray" ) ); //$NON-NLS-1$
	paperTray.setDataType( IConfigurableOption.DataType.INTEGER );
	paperTray.setDisplayType( IConfigurableOption.DisplayType.TEXT );
	// null: no forced tray; was new Integer( 0 ), which forced tray 0.
	paperTray.setDefaultValue( null );
	paperTray.setToolTip( null );
	paperTray.setDescription( getMessage( "OptionDescription.PaperTray" ) ); //$NON-NLS-1$
	// Initializes the option for scale.
	ConfigurableOption scale = new ConfigurableOption(
			PostscriptRenderOption.OPTION_SCALE );
	scale.setDisplayName( getMessage( "OptionDisplayValue.Scale" ) ); //$NON-NLS-1$
	scale.setDataType( IConfigurableOption.DataType.INTEGER );
	scale.setDisplayType( IConfigurableOption.DisplayType.TEXT );
	scale.setDefaultValue( new Integer( 100 ) );
	scale.setToolTip( null );
	scale.setDescription( getMessage( "OptionDescription.Scale" ) ); //$NON-NLS-1$
	// Initializes the option for resolution.
	ConfigurableOption resolution = new ConfigurableOption(
			PostscriptRenderOption.OPTION_RESOLUTION );
	resolution
			.setDisplayName( getMessage( "OptionDisplayValue.Resolution" ) ); //$NON-NLS-1$
	resolution.setDataType( IConfigurableOption.DataType.STRING );
	resolution.setDisplayType( IConfigurableOption.DisplayType.TEXT );
	// null: no forced resolution; was the hard-coded string "600 X 600".
	resolution.setDefaultValue( null );
	resolution.setToolTip( null );
	resolution
			.setDescription( getMessage( "OptionDescription.Resolution" ) ); //$NON-NLS-1$
	// Initializes the option for color.
	ConfigurableOption color = new ConfigurableOption(
			PostscriptRenderOption.OPTION_COLOR );
	color.setDisplayName( getMessage( "OptionDisplayValue.Color" ) ); //$NON-NLS-1$
	color.setDataType( IConfigurableOption.DataType.BOOLEAN );
	color.setDisplayType( IConfigurableOption.DisplayType.CHECKBOX );
	color.setDefaultValue( Boolean.TRUE );
	color.setToolTip( null );
	color.setDescription( getMessage( "OptionDescription.Color" ) ); //$NON-NLS-1$
	// Initializes the option for chart DPI.
	ConfigurableOption chartDpi = new ConfigurableOption( CHART_DPI );
	chartDpi.setDisplayName( getMessage( "OptionDisplayValue.ChartDpi" ) ); //$NON-NLS-1$
	chartDpi.setDataType( IConfigurableOption.DataType.INTEGER );
	chartDpi.setDisplayType( IConfigurableOption.DisplayType.TEXT );
	chartDpi.setDefaultValue( new Integer( 192 ) );
	chartDpi
			.setToolTip( "The DPI which chart engine uses to generate charts. For example, 192." );
	chartDpi.setDescription( getMessage( "OptionDescription.ChartDpi" ) ); //$NON-NLS-1$
	options = new IConfigurableOption[]{bidiProcessing, textWrapping,
			fontSubstitution, pageOverFlow, copies, collate, duplex,
			paperSize, paperTray, scale, resolution, color, chartDpi};
}
/**
 * Builds the descriptors for every configurable option the postscript
 * emitter exposes and stores them in {@code options}. Each descriptor
 * carries a localized display name, data type, UI display type, default
 * value and description. STRING/INTEGER options whose default is null are
 * left for the printer to decide (duplex, paper size, paper tray,
 * resolution).
 */
| private void initOptions( )
{
// Initializes the option for BIDIProcessing.
ConfigurableOption bidiProcessing = new ConfigurableOption(
BIDI_PROCESSING );
bidiProcessing
.setDisplayName( getMessage( "OptionDisplayValue.BidiProcessing" ) ); //$NON-NLS-1$
bidiProcessing.setDataType( IConfigurableOption.DataType.BOOLEAN );
bidiProcessing.setDisplayType( IConfigurableOption.DisplayType.CHECKBOX );
bidiProcessing.setDefaultValue( Boolean.TRUE );
bidiProcessing.setToolTip( null );
bidiProcessing
.setDescription( getMessage( "OptionDescription.BidiProcessing" ) ); //$NON-NLS-1$
// Initializes the option for TextWrapping.
ConfigurableOption textWrapping = new ConfigurableOption( TEXT_WRAPPING );
textWrapping
.setDisplayName( getMessage( "OptionDisplayValue.TextWrapping" ) ); //$NON-NLS-1$
textWrapping.setDataType( IConfigurableOption.DataType.BOOLEAN );
textWrapping.setDisplayType( IConfigurableOption.DisplayType.CHECKBOX );
textWrapping.setDefaultValue( Boolean.TRUE );
textWrapping.setToolTip( null );
textWrapping
.setDescription( getMessage( "OptionDescription.TextWrapping" ) ); //$NON-NLS-1$
// Initializes the option for fontSubstitution.
ConfigurableOption fontSubstitution = new ConfigurableOption(
FONT_SUBSTITUTION );
fontSubstitution
.setDisplayName( getMessage( "OptionDisplayValue.FontSubstitution" ) );
fontSubstitution.setDataType( IConfigurableOption.DataType.BOOLEAN );
fontSubstitution
.setDisplayType( IConfigurableOption.DisplayType.CHECKBOX );
fontSubstitution.setDefaultValue( Boolean.TRUE );
fontSubstitution.setToolTip( null );
fontSubstitution
.setDescription( getMessage( "OptionDescription.FontSubstitution" ) ); //$NON-NLS-1$
// Initializes the option for PageOverFlow.
ConfigurableOption pageOverFlow = new ConfigurableOption(
IPDFRenderOption.PAGE_OVERFLOW );
pageOverFlow
.setDisplayName( getMessage( "OptionDisplayValue.PageOverFlow" ) ); //$NON-NLS-1$
pageOverFlow.setDataType( IConfigurableOption.DataType.INTEGER );
pageOverFlow.setDisplayType( IConfigurableOption.DisplayType.COMBO );
pageOverFlow
.setChoices( new OptionValue[]{
new OptionValue(
IPDFRenderOption.CLIP_CONTENT,
getMessage( "OptionDisplayValue.CLIP_CONTENT" ) ), //$NON-NLS-1$
new OptionValue(
IPDFRenderOption.FIT_TO_PAGE_SIZE,
getMessage( "OptionDisplayValue.FIT_TO_PAGE_SIZE" ) ), //$NON-NLS-1$
new OptionValue(
IPDFRenderOption.OUTPUT_TO_MULTIPLE_PAGES,
getMessage( "OptionDisplayValue.OUTPUT_TO_MULTIPLE_PAGES" ) ), //$NON-NLS-1$
new OptionValue(
IPDFRenderOption.ENLARGE_PAGE_SIZE,
getMessage( "OptionDisplayValue.ENLARGE_PAGE_SIZE" ) ) //$NON-NLS-1$
} );
pageOverFlow.setDefaultValue( IPDFRenderOption.CLIP_CONTENT );
pageOverFlow.setToolTip( null );
pageOverFlow
.setDescription( getMessage( "OptionDescription.PageOverFlow" ) ); //$NON-NLS-1$
// Initializes the option for copies.
ConfigurableOption copies = new ConfigurableOption(
PostscriptRenderOption.OPTION_COPIES );
copies.setDisplayName( getMessage( "OptionDisplayValue.Copies" ) ); //$NON-NLS-1$
copies.setDataType( IConfigurableOption.DataType.INTEGER );
copies.setDisplayType( IConfigurableOption.DisplayType.TEXT );
copies.setDefaultValue( 1 );
copies.setToolTip( null );
copies.setDescription( getMessage( "OptionDescription.Copies" ) ); //$NON-NLS-1$
// Initializes the option for collate.
ConfigurableOption collate = new ConfigurableOption(
PostscriptRenderOption.OPTION_COLLATE );
collate.setDisplayName( getMessage( "OptionDisplayValue.Collate" ) ); //$NON-NLS-1$
collate.setDataType( IConfigurableOption.DataType.BOOLEAN );
collate.setDisplayType( IConfigurableOption.DisplayType.CHECKBOX );
collate.setDefaultValue( Boolean.FALSE );
collate.setToolTip( null );
collate.setDescription( getMessage( "OptionDescription.Collate" ) ); //$NON-NLS-1$
// Initializes the option for duplex.
ConfigurableOption duplex = new ConfigurableOption(
PostscriptRenderOption.OPTION_DUPLEX );
duplex.setDisplayName( getMessage( "OptionDisplayValue.Duplex" ) ); //$NON-NLS-1$
duplex.setDataType( IConfigurableOption.DataType.STRING );
duplex.setDisplayType( IConfigurableOption.DisplayType.TEXT );
duplex.setDefaultValue( null );
duplex.setToolTip( null );
duplex.setDescription( getMessage( "OptionDescription.Duplex" ) ); //$NON-NLS-1$
// Initializes the option for paperSize.
ConfigurableOption paperSize = new ConfigurableOption(
PostscriptRenderOption.OPTION_PAPER_SIZE );
paperSize.setDisplayName( getMessage( "OptionDisplayValue.PaperSize" ) ); //$NON-NLS-1$
paperSize.setDataType( IConfigurableOption.DataType.STRING );
paperSize.setDisplayType( IConfigurableOption.DisplayType.TEXT );
paperSize.setDefaultValue( null );
paperSize.setToolTip( null );
paperSize.setDescription( getMessage( "OptionDescription.PaperSize" ) ); //$NON-NLS-1$
// Initializes the option for paperTray.
ConfigurableOption paperTray = new ConfigurableOption(
PostscriptRenderOption.OPTION_PAPER_TRAY );
paperTray.setDisplayName( getMessage( "OptionDisplayValue.PaperTray" ) ); //$NON-NLS-1$
paperTray.setDataType( IConfigurableOption.DataType.INTEGER );
paperTray.setDisplayType( IConfigurableOption.DisplayType.TEXT );
// null default: no tray is forced, the printer's own default applies.
paperTray.setDefaultValue( null );
paperTray.setToolTip( null );
paperTray.setDescription( getMessage( "OptionDescription.PaperTray" ) ); //$NON-NLS-1$
// Initializes the option for scale.
ConfigurableOption scale = new ConfigurableOption(
PostscriptRenderOption.OPTION_SCALE );
scale.setDisplayName( getMessage( "OptionDisplayValue.Scale" ) ); //$NON-NLS-1$
scale.setDataType( IConfigurableOption.DataType.INTEGER );
scale.setDisplayType( IConfigurableOption.DisplayType.TEXT );
scale.setDefaultValue( new Integer( 100 ) );
scale.setToolTip( null );
scale.setDescription( getMessage( "OptionDescription.Scale" ) ); //$NON-NLS-1$
// Initializes the option for resolution.
ConfigurableOption resolution = new ConfigurableOption(
PostscriptRenderOption.OPTION_RESOLUTION );
resolution
.setDisplayName( getMessage( "OptionDisplayValue.Resolution" ) ); //$NON-NLS-1$
resolution.setDataType( IConfigurableOption.DataType.STRING );
resolution.setDisplayType( IConfigurableOption.DisplayType.TEXT );
// null default: no resolution is forced on the printer.
resolution.setDefaultValue( null );
resolution.setToolTip( null );
resolution
.setDescription( getMessage( "OptionDescription.Resolution" ) ); //$NON-NLS-1$
// Initializes the option for color.
ConfigurableOption color = new ConfigurableOption(
PostscriptRenderOption.OPTION_COLOR );
color.setDisplayName( getMessage( "OptionDisplayValue.Color" ) ); //$NON-NLS-1$
color.setDataType( IConfigurableOption.DataType.BOOLEAN );
color.setDisplayType( IConfigurableOption.DisplayType.CHECKBOX );
color.setDefaultValue( Boolean.TRUE );
color.setToolTip( null );
color.setDescription( getMessage( "OptionDescription.Color" ) ); //$NON-NLS-1$
// Initializes the option for chart DPI.
ConfigurableOption chartDpi = new ConfigurableOption( CHART_DPI );
chartDpi.setDisplayName( getMessage( "OptionDisplayValue.ChartDpi" ) ); //$NON-NLS-1$
chartDpi.setDataType( IConfigurableOption.DataType.INTEGER );
chartDpi
.setDisplayType( IConfigurableOption.DisplayType.TEXT );
chartDpi.setDefaultValue( new Integer( 192 ) );
chartDpi
.setToolTip( "The DPI which chart engine uses to generate charts. For example, 192." );
chartDpi.setDescription( getMessage( "OptionDescription.ChartDpi" ) ); //$NON-NLS-1$
options = new IConfigurableOption[]{bidiProcessing, textWrapping,
fontSubstitution, pageOverFlow, copies, collate, duplex,
paperSize, paperTray, scale, resolution, color, chartDpi};
}
|
diff --git a/src/main/java/org/generationcp/browser/study/util/DatasetExporter.java b/src/main/java/org/generationcp/browser/study/util/DatasetExporter.java
index 4c3d647..7845c49 100644
--- a/src/main/java/org/generationcp/browser/study/util/DatasetExporter.java
+++ b/src/main/java/org/generationcp/browser/study/util/DatasetExporter.java
@@ -1,504 +1,512 @@
package org.generationcp.browser.study.util;
import java.io.FileOutputStream;
import java.io.OutputStream;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.poi.hssf.usermodel.HSSFWorkbook;
import org.apache.poi.ss.usermodel.Cell;
import org.apache.poi.ss.usermodel.CellStyle;
import org.apache.poi.ss.usermodel.Row;
import org.apache.poi.ss.usermodel.Sheet;
import org.apache.poi.ss.usermodel.Workbook;
import org.apache.poi.ss.util.CellRangeAddress;
import org.generationcp.commons.util.PoiUtil;
import org.generationcp.middleware.exceptions.MiddlewareQueryException;
import org.generationcp.middleware.manager.api.StudyDataManager;
import org.generationcp.middleware.manager.api.TraitDataManager;
import org.generationcp.middleware.pojos.CharacterDataElement;
import org.generationcp.middleware.pojos.CharacterLevelElement;
import org.generationcp.middleware.pojos.DatasetCondition;
import org.generationcp.middleware.pojos.Factor;
import org.generationcp.middleware.pojos.NumericDataElement;
import org.generationcp.middleware.pojos.NumericLevelElement;
import org.generationcp.middleware.pojos.Scale;
import org.generationcp.middleware.pojos.Study;
import org.generationcp.middleware.pojos.Trait;
import org.generationcp.middleware.pojos.TraitMethod;
import org.generationcp.middleware.pojos.Variate;
public class DatasetExporter {
private static final int conditionListHeaderRowIndex = 8;
private StudyDataManager studyDataManager;
private TraitDataManager traitDataManager;
private Integer studyId;
private Integer representationId;
public DatasetExporter(StudyDataManager studyDataManager, TraitDataManager traitDataManager, Integer studyId, Integer representationId) {
this.studyDataManager = studyDataManager;
this.traitDataManager = traitDataManager;
this.studyId = studyId;
this.representationId = representationId;
}
public FileOutputStream exportToFieldBookExcel(String filename) throws DatasetExporterException {
//create workbook
Workbook workbook = new HSSFWorkbook();
CellStyle cellStyle = workbook.createCellStyle();
CellStyle cellStyleForObservationSheet = workbook.createCellStyle();
//create two sheets, one for description and nother for measurements
Sheet descriptionSheet = workbook.createSheet("Description");
Sheet observationSheet = workbook.createSheet("Observation");
//this map is for mapping the columns names of the dataset to their column index in the excel sheet
Map<String, Integer> columnsMap = new HashMap<String, Integer>();
int observationSheetColumnIndex = 0;
//write the details on the first sheet - description
//get the study first
Study study = null;
try {
study = this.studyDataManager.getStudyByID(this.studyId);
} catch (MiddlewareQueryException ex) {
throw new DatasetExporterException("Error with getting Study with id: " + this.studyId, ex);
}
if(study != null) {
//get the needed study details
String name = study.getName();
String title = study.getTitle();
Integer pmkey = study.getProjectKey();
String objective = study.getObjective();
Integer startDate = study.getStartDate();
Integer endDate = study.getEndDate();
String type = study.getType();
//add to the sheet
Row row0 = descriptionSheet.createRow(0);
row0.createCell(0).setCellValue("STUDY");
row0.createCell(1).setCellValue(name);
Row row1 = descriptionSheet.createRow(1);
row1.createCell(0).setCellValue("TITLE");
row1.createCell(1).setCellValue(title);
Row row2 = descriptionSheet.createRow(2);
row2.createCell(0).setCellValue("PMKEY");
Cell pmKeyCell = PoiUtil.createCell(cellStyle, row2, (short) 1, CellStyle.ALIGN_LEFT, CellStyle.ALIGN_JUSTIFY);
pmKeyCell.setCellValue(pmkey);
Row row3 = descriptionSheet.createRow(3);
row3.createCell(0).setCellValue("OBJECTIVE");
row3.createCell(1).setCellValue(objective);
Row row4 = descriptionSheet.createRow(4);
row4.createCell(0).setCellValue("START DATE");
Cell startDateCell = PoiUtil.createCell(cellStyle, row4, (short) 1, CellStyle.ALIGN_LEFT, CellStyle.ALIGN_JUSTIFY);
startDateCell.setCellValue(startDate);
Row row5 = descriptionSheet.createRow(5);
row5.createCell(0).setCellValue("END DATE");
Cell endDateCell = PoiUtil.createCell(cellStyle, row5, (short) 1, CellStyle.ALIGN_LEFT, CellStyle.ALIGN_JUSTIFY);
endDateCell.setCellValue(endDate);
Row row6 = descriptionSheet.createRow(6);
row6.createCell(0).setCellValue("STUDY TYPE");
row6.createCell(1).setCellValue(type);
//merge cells for the study details
for(int ctr = 0; ctr < 7; ctr++) {
descriptionSheet.addMergedRegion(new CellRangeAddress(ctr, ctr, 1, 7));
}
//empty row
Row row7 = descriptionSheet.createRow(7);
//row with headings for condition list
Row conditionHeaderRow = descriptionSheet.createRow(this.conditionListHeaderRowIndex);
conditionHeaderRow.createCell(0).setCellValue("CONDITION");
conditionHeaderRow.createCell(1).setCellValue("DESCRIPTION");
conditionHeaderRow.createCell(2).setCellValue("PROPERTY");
conditionHeaderRow.createCell(3).setCellValue("SCALE");
conditionHeaderRow.createCell(4).setCellValue("METHOD");
conditionHeaderRow.createCell(5).setCellValue("DATA TYPE");
conditionHeaderRow.createCell(6).setCellValue("VALUE");
conditionHeaderRow.createCell(7).setCellValue("LABEL");
//get the conditions and their details
List<DatasetCondition> conditions = new ArrayList<DatasetCondition>();
try {
conditions.addAll(this.studyDataManager.getConditionsByRepresentationId(this.representationId));
} catch(Exception ex) {
throw new DatasetExporterException("Error with getting conditions of study - " + name
+ ", representation - " + this.representationId, ex);
}
int conditionRowIndex = this.conditionListHeaderRowIndex + 1;
for(DatasetCondition condition : conditions) {
String traitScaleMethodInfo[] = getTraitScaleMethodInfo(condition.getTraitId(), condition.getScaleId(), condition.getMethodId());
String conditionName = condition.getName();
if(conditionName != null) {
conditionName = conditionName.trim();
}
String conditionType = condition.getType();
String conditionLabel = "";
try {
conditionLabel = this.studyDataManager.getMainLabelOfFactorByFactorId(condition.getFactorId());
} catch (MiddlewareQueryException ex) {
conditionLabel = "";
}
Row conditionRow = descriptionSheet.createRow(conditionRowIndex);
conditionRow.createCell(0).setCellValue(conditionName);
conditionRow.createCell(1).setCellValue(traitScaleMethodInfo[0]);
conditionRow.createCell(2).setCellValue(traitScaleMethodInfo[1]);
conditionRow.createCell(3).setCellValue(traitScaleMethodInfo[2]);
conditionRow.createCell(4).setCellValue(traitScaleMethodInfo[3]);
conditionRow.createCell(5).setCellValue(conditionType);
if(conditionType.equals("N")) {
Double thevalue = (Double) condition.getValue();
conditionRow.createCell(6).setCellValue(thevalue);
} else {
conditionRow.createCell(6).setCellValue(condition.getValue().toString());
}
conditionRow.createCell(7).setCellValue(conditionLabel);
//add entry to columns mapping
//we set the value to -1 to signify that this should not be a column in the observation sheet
if(!conditionName.equals("STUDY")) {
columnsMap.put(conditionName, Integer.valueOf(-1));
}
conditionRowIndex++;
}
//empty row
Row emptyRowBeforeFactors = descriptionSheet.createRow(conditionRowIndex);
//row with headings for factor list
int factorRowHeaderIndex = conditionRowIndex + 1;
Row factorHeaderRow = descriptionSheet.createRow(factorRowHeaderIndex);
factorHeaderRow.createCell(0).setCellValue("FACTOR");
factorHeaderRow.createCell(1).setCellValue("DESCRIPTION");
factorHeaderRow.createCell(2).setCellValue("PROPERTY");
factorHeaderRow.createCell(3).setCellValue("SCALE");
factorHeaderRow.createCell(4).setCellValue("METHOD");
factorHeaderRow.createCell(5).setCellValue("DATA TYPE");
factorHeaderRow.createCell(6).setCellValue("");
factorHeaderRow.createCell(7).setCellValue("LABEL");
//get the factors and their details
List<Factor> factors = new ArrayList<Factor>();
try {
factors.addAll(this.studyDataManager.getFactorsByRepresentationId(this.representationId));
} catch(Exception ex) {
throw new DatasetExporterException("Error with getting factors of study - " + name
+ ", representation - " + this.representationId, ex);
}
int factorRowIndex = factorRowHeaderIndex + 1;
for(Factor factor : factors) {
String dataType = factor.getDataType();
String factorName = factor.getName();
if(factorName != null) {
factorName = factorName.trim();
}
//check if factor is already written as a condition
Integer temp = columnsMap.get(factorName);
if(temp == null && !factorName.equals("STUDY")) {
String traitScaleMethodInfo[] = getTraitScaleMethodInfo(factor.getTraitId(), factor.getScaleId(), factor.getMethodId());
String factorLabel = "";
try {
factorLabel = this.studyDataManager.getMainLabelOfFactorByFactorId(factor.getFactorId());
} catch (MiddlewareQueryException ex) {
factorLabel = "";
}
Row factorRow = descriptionSheet.createRow(factorRowIndex);
factorRow.createCell(0).setCellValue(factorName);
factorRow.createCell(1).setCellValue(traitScaleMethodInfo[0]);
factorRow.createCell(2).setCellValue(traitScaleMethodInfo[1]);
factorRow.createCell(3).setCellValue(traitScaleMethodInfo[2]);
factorRow.createCell(4).setCellValue(traitScaleMethodInfo[3]);
factorRow.createCell(5).setCellValue(dataType);
factorRow.createCell(6).setCellValue("");
factorRow.createCell(7).setCellValue(factorLabel);
//add entry to columns mapping
columnsMap.put(factorName, Integer.valueOf(observationSheetColumnIndex));
observationSheetColumnIndex++;
factorRowIndex++;
}
}
//empty row
Row emptyRowBeforeVariate = descriptionSheet.createRow(factorRowIndex);
//row with headings for variate list
int variateHeaderRowIndex = factorRowIndex + 1;
Row variateHeaderRow = descriptionSheet.createRow(variateHeaderRowIndex);
variateHeaderRow.createCell(0).setCellValue("VARIATE");
variateHeaderRow.createCell(1).setCellValue("DESCRIPTION");
variateHeaderRow.createCell(2).setCellValue("PROPERTY");
variateHeaderRow.createCell(3).setCellValue("SCALE");
variateHeaderRow.createCell(4).setCellValue("METHOD");
variateHeaderRow.createCell(5).setCellValue("DATA TYPE");
//get the variates and their details
List<Variate> variates = new ArrayList<Variate>();
try {
variates.addAll(this.studyDataManager.getVariatesByRepresentationId(this.representationId));
}
catch(Exception ex) {
throw new DatasetExporterException("Error with getting variates of study - " + name
+ ", representation - " + this.representationId, ex);
}
int variateRowIndex = variateHeaderRowIndex + 1;
for(Variate variate : variates) {
String dataType = variate.getDataType();
String variateName = variate.getName();
if(variateName != null) {
variateName = variateName.trim();
}
String traitScaleMethodInfo[] = getTraitScaleMethodInfo(variate.getTraitId(), variate.getScaleId(), variate.getMethodId());
Row variateRow = descriptionSheet.createRow(variateRowIndex);
variateRow.createCell(0).setCellValue(variateName);
variateRow.createCell(1).setCellValue(traitScaleMethodInfo[0]);
variateRow.createCell(2).setCellValue(traitScaleMethodInfo[1]);
variateRow.createCell(3).setCellValue(traitScaleMethodInfo[2]);
variateRow.createCell(4).setCellValue(traitScaleMethodInfo[3]);
variateRow.createCell(5).setCellValue(dataType);
//add entry to columns mapping
columnsMap.put(variateName, Integer.valueOf(observationSheetColumnIndex));
observationSheetColumnIndex++;
variateRowIndex++;
}
//populate the measurements sheet
//establish the columns of the dataset first
Row datasetHeaderRow = observationSheet.createRow(0);
for(String columnName : columnsMap.keySet()) {
short columnIndex = columnsMap.get(columnName).shortValue();
if(columnIndex >= 0) {
Cell cell = PoiUtil.createCell(cellStyleForObservationSheet, datasetHeaderRow, columnIndex, CellStyle.ALIGN_CENTER, CellStyle.ALIGN_CENTER);
cell.setCellValue(columnName);
}
}
//then work with the data
//do it by 50 rows at a time
int pageSize = 50;
long totalNumberOfRows = 0;
int sheetRowIndex = 1;
try {
totalNumberOfRows = this.studyDataManager.countOunitIDsByRepresentationId(this.representationId);
} catch(Exception ex) {
throw new DatasetExporterException("Error with getting count of ounit ids for study - " + name
+ ", representation - " + this.representationId, ex);
}
for(int start = 0; start < totalNumberOfRows; start = start + pageSize) {
List<Integer> ounitIds = new ArrayList<Integer>();
try {
//first get the ounit ids, these are the ids of the rows in the dataset
ounitIds.addAll(this.studyDataManager.getOunitIDsByRepresentationId(this.representationId, start, pageSize));
} catch(Exception ex) {
throw new DatasetExporterException("Error with getting ounit ids of study - " + name
+ ", representation - " + this.representationId, ex);
}
if(!ounitIds.isEmpty()) {
//map each ounit id into a row in the observation sheet
Map<Integer, Row> rowMap = new HashMap<Integer, Row>();
for(Integer ounitId : ounitIds) {
Row row = observationSheet.createRow(sheetRowIndex);
sheetRowIndex++;
rowMap.put(ounitId, row);
}
//then get the data for each of the observation units (ounits)
List<CharacterLevelElement> charLevels = new ArrayList<CharacterLevelElement>();
try {
charLevels.addAll(this.studyDataManager.getCharacterLevelValuesByOunitIdList(ounitIds));
} catch(Exception ex) {
throw new DatasetExporterException("Error with getting character level values of study - " + name
+ ", representation - " + this.representationId, ex);
}
for(CharacterLevelElement elem : charLevels) {
String factorName = elem.getFactorName();
if(factorName != null) {
factorName = factorName.trim();
}
if(!factorName.equals("STUDY")) {
Row row = rowMap.get(elem.getOunitId());
if(row != null) {
short columnIndex = columnsMap.get(factorName).shortValue();
if(columnIndex >= 0) {
Cell cell = PoiUtil.createCell(cellStyleForObservationSheet, row, columnIndex, CellStyle.ALIGN_CENTER, CellStyle.ALIGN_CENTER);
String value = elem.getValue();
if(value != null) {
value = value.trim();
}
cell.setCellValue(value);
}
}
}
}
List<NumericLevelElement> numericLevels = new ArrayList<NumericLevelElement>();
try {
numericLevels.addAll(this.studyDataManager.getNumericLevelValuesByOunitIdList(ounitIds));
} catch(Exception ex) {
throw new DatasetExporterException("Error with getting numeric level values of study - " + name
+ ", representation - " + this.representationId, ex);
}
for(NumericLevelElement elem : numericLevels) {
String factorName = elem.getFactorName();
if(factorName != null) {
factorName = factorName.trim();
}
if(!factorName.equals("STUDY")) {
Row row = rowMap.get(elem.getOunitId());
if(row != null) {
short columnIndex = columnsMap.get(factorName).shortValue();
if(columnIndex >= 0) {
Cell cell = PoiUtil.createCell(cellStyleForObservationSheet, row, columnIndex, CellStyle.ALIGN_CENTER, CellStyle.ALIGN_CENTER);
- cell.setCellValue(elem.getValue());
+ double elemValue = 0;
+ if(elem.getValue() != null){
+ elemValue = elem.getValue().doubleValue();
+ }
+ cell.setCellValue(elemValue);
}
}
}
}
List<CharacterDataElement> charDatas = new ArrayList<CharacterDataElement>();
try {
charDatas.addAll(this.studyDataManager.getCharacterDataValuesByOunitIdList(ounitIds));
} catch(Exception ex) {
throw new DatasetExporterException("Error with getting character data values of study - " + name
+ ", representation - " + this.representationId, ex);
}
for(CharacterDataElement elem : charDatas) {
Row row = rowMap.get(elem.getOunitId());
if(row != null) {
String variateName = elem.getVariateName();
if(variateName != null) {
variateName = variateName.trim();
}
short columnIndex = columnsMap.get(variateName).shortValue();
Cell cell = PoiUtil.createCell(cellStyleForObservationSheet, row, columnIndex, CellStyle.ALIGN_CENTER, CellStyle.ALIGN_CENTER);
String value = elem.getValue();
if(value != null) {
value = value.trim();
}
cell.setCellValue(value);
}
}
List<NumericDataElement> numericDatas = new ArrayList<NumericDataElement>();
try {
numericDatas.addAll(this.studyDataManager.getNumericDataValuesByOunitIdList(ounitIds));
} catch(Exception ex) {
throw new DatasetExporterException("Error with getting numeric data values of study - " + name
+ ", representation - " + this.representationId, ex);
}
for(NumericDataElement elem : numericDatas) {
Row row = rowMap.get(elem.getOunitId());
if(row != null) {
String variateName = elem.getVariateName();
if(variateName != null) {
variateName = variateName.trim();
}
short columnIndex = columnsMap.get(variateName).shortValue();
Cell cell = PoiUtil.createCell(cellStyleForObservationSheet, row, columnIndex, CellStyle.ALIGN_CENTER, CellStyle.ALIGN_CENTER);
- cell.setCellValue(elem.getValue());
+ double elemValue = 0;
+ if(elem.getValue() != null){
+ elemValue = elem.getValue().doubleValue();
+ }
+ cell.setCellValue(elemValue);
}
}
}
}
}
//adjust column widths of description sheet to fit contents
for(int ctr = 0; ctr < 8; ctr++) {
if(ctr != 1) {
descriptionSheet.autoSizeColumn(ctr);
}
}
//adjust column widths of observation sheet to fit contents
for(int ctr = 0; ctr < observationSheetColumnIndex; ctr++) {
observationSheet.autoSizeColumn(ctr);
}
try {
//write the excel file
FileOutputStream fileOutputStream = new FileOutputStream(filename);
workbook.write(fileOutputStream);
fileOutputStream.close();
return fileOutputStream;
} catch(Exception ex) {
throw new DatasetExporterException("Error with writing to: " + filename, ex);
}
}
/**
 * Looks up the human-readable names for a trait/scale/method combination.
 * Any record that cannot be found is reported as "Not specified".
 *
 * @param traitId  id of the trait to look up
 * @param scaleId  id of the scale to look up
 * @param methodId id of the trait method to look up
 * @return a 4-element array: {trait description, trait name, scale name, method name}
 * @throws DatasetExporterException if any of the middleware lookups fails
 */
private String[] getTraitScaleMethodInfo(Integer traitId, Integer scaleId, Integer methodId) throws DatasetExporterException {
    String[] info = new String[4];
    try {
        Trait trait = this.traitDataManager.getTraitById(traitId);
        Scale scale = this.traitDataManager.getScaleByID(scaleId);
        TraitMethod method = this.traitDataManager.getTraitMethodById(methodId);
        // fall back to a fixed placeholder for every missing record
        info[0] = (trait != null) ? trait.getDescripton() : "Not specified";
        info[1] = (trait != null) ? trait.getName() : "Not specified";
        info[2] = (scale != null) ? scale.getName() : "Not specified";
        info[3] = (method != null) ? method.getName() : "Not specified";
    } catch(Exception ex) {
        throw new DatasetExporterException("Error with getting trait, scale, and method information for " +
                "trait id = " + traitId +
                " scale id = " + scaleId +
                " method id = " + methodId, ex);
    }
    return info;
}
}
| false | true | public FileOutputStream exportToFieldBookExcel(String filename) throws DatasetExporterException {
//create workbook
Workbook workbook = new HSSFWorkbook();
CellStyle cellStyle = workbook.createCellStyle();
CellStyle cellStyleForObservationSheet = workbook.createCellStyle();
//create two sheets, one for description and nother for measurements
Sheet descriptionSheet = workbook.createSheet("Description");
Sheet observationSheet = workbook.createSheet("Observation");
//this map is for mapping the columns names of the dataset to their column index in the excel sheet
Map<String, Integer> columnsMap = new HashMap<String, Integer>();
int observationSheetColumnIndex = 0;
//write the details on the first sheet - description
//get the study first
Study study = null;
try {
study = this.studyDataManager.getStudyByID(this.studyId);
} catch (MiddlewareQueryException ex) {
throw new DatasetExporterException("Error with getting Study with id: " + this.studyId, ex);
}
if(study != null) {
//get the needed study details
String name = study.getName();
String title = study.getTitle();
Integer pmkey = study.getProjectKey();
String objective = study.getObjective();
Integer startDate = study.getStartDate();
Integer endDate = study.getEndDate();
String type = study.getType();
//add to the sheet
Row row0 = descriptionSheet.createRow(0);
row0.createCell(0).setCellValue("STUDY");
row0.createCell(1).setCellValue(name);
Row row1 = descriptionSheet.createRow(1);
row1.createCell(0).setCellValue("TITLE");
row1.createCell(1).setCellValue(title);
Row row2 = descriptionSheet.createRow(2);
row2.createCell(0).setCellValue("PMKEY");
Cell pmKeyCell = PoiUtil.createCell(cellStyle, row2, (short) 1, CellStyle.ALIGN_LEFT, CellStyle.ALIGN_JUSTIFY);
pmKeyCell.setCellValue(pmkey);
Row row3 = descriptionSheet.createRow(3);
row3.createCell(0).setCellValue("OBJECTIVE");
row3.createCell(1).setCellValue(objective);
Row row4 = descriptionSheet.createRow(4);
row4.createCell(0).setCellValue("START DATE");
Cell startDateCell = PoiUtil.createCell(cellStyle, row4, (short) 1, CellStyle.ALIGN_LEFT, CellStyle.ALIGN_JUSTIFY);
startDateCell.setCellValue(startDate);
Row row5 = descriptionSheet.createRow(5);
row5.createCell(0).setCellValue("END DATE");
Cell endDateCell = PoiUtil.createCell(cellStyle, row5, (short) 1, CellStyle.ALIGN_LEFT, CellStyle.ALIGN_JUSTIFY);
endDateCell.setCellValue(endDate);
Row row6 = descriptionSheet.createRow(6);
row6.createCell(0).setCellValue("STUDY TYPE");
row6.createCell(1).setCellValue(type);
//merge cells for the study details
for(int ctr = 0; ctr < 7; ctr++) {
descriptionSheet.addMergedRegion(new CellRangeAddress(ctr, ctr, 1, 7));
}
//empty row
Row row7 = descriptionSheet.createRow(7);
//row with headings for condition list
Row conditionHeaderRow = descriptionSheet.createRow(this.conditionListHeaderRowIndex);
conditionHeaderRow.createCell(0).setCellValue("CONDITION");
conditionHeaderRow.createCell(1).setCellValue("DESCRIPTION");
conditionHeaderRow.createCell(2).setCellValue("PROPERTY");
conditionHeaderRow.createCell(3).setCellValue("SCALE");
conditionHeaderRow.createCell(4).setCellValue("METHOD");
conditionHeaderRow.createCell(5).setCellValue("DATA TYPE");
conditionHeaderRow.createCell(6).setCellValue("VALUE");
conditionHeaderRow.createCell(7).setCellValue("LABEL");
//get the conditions and their details
List<DatasetCondition> conditions = new ArrayList<DatasetCondition>();
try {
conditions.addAll(this.studyDataManager.getConditionsByRepresentationId(this.representationId));
} catch(Exception ex) {
throw new DatasetExporterException("Error with getting conditions of study - " + name
+ ", representation - " + this.representationId, ex);
}
int conditionRowIndex = this.conditionListHeaderRowIndex + 1;
for(DatasetCondition condition : conditions) {
String traitScaleMethodInfo[] = getTraitScaleMethodInfo(condition.getTraitId(), condition.getScaleId(), condition.getMethodId());
String conditionName = condition.getName();
if(conditionName != null) {
conditionName = conditionName.trim();
}
String conditionType = condition.getType();
String conditionLabel = "";
try {
conditionLabel = this.studyDataManager.getMainLabelOfFactorByFactorId(condition.getFactorId());
} catch (MiddlewareQueryException ex) {
conditionLabel = "";
}
Row conditionRow = descriptionSheet.createRow(conditionRowIndex);
conditionRow.createCell(0).setCellValue(conditionName);
conditionRow.createCell(1).setCellValue(traitScaleMethodInfo[0]);
conditionRow.createCell(2).setCellValue(traitScaleMethodInfo[1]);
conditionRow.createCell(3).setCellValue(traitScaleMethodInfo[2]);
conditionRow.createCell(4).setCellValue(traitScaleMethodInfo[3]);
conditionRow.createCell(5).setCellValue(conditionType);
if(conditionType.equals("N")) {
Double thevalue = (Double) condition.getValue();
conditionRow.createCell(6).setCellValue(thevalue);
} else {
conditionRow.createCell(6).setCellValue(condition.getValue().toString());
}
conditionRow.createCell(7).setCellValue(conditionLabel);
//add entry to columns mapping
//we set the value to -1 to signify that this should not be a column in the observation sheet
if(!conditionName.equals("STUDY")) {
columnsMap.put(conditionName, Integer.valueOf(-1));
}
conditionRowIndex++;
}
//empty row
Row emptyRowBeforeFactors = descriptionSheet.createRow(conditionRowIndex);
//row with headings for factor list
int factorRowHeaderIndex = conditionRowIndex + 1;
Row factorHeaderRow = descriptionSheet.createRow(factorRowHeaderIndex);
factorHeaderRow.createCell(0).setCellValue("FACTOR");
factorHeaderRow.createCell(1).setCellValue("DESCRIPTION");
factorHeaderRow.createCell(2).setCellValue("PROPERTY");
factorHeaderRow.createCell(3).setCellValue("SCALE");
factorHeaderRow.createCell(4).setCellValue("METHOD");
factorHeaderRow.createCell(5).setCellValue("DATA TYPE");
factorHeaderRow.createCell(6).setCellValue("");
factorHeaderRow.createCell(7).setCellValue("LABEL");
//get the factors and their details
List<Factor> factors = new ArrayList<Factor>();
try {
factors.addAll(this.studyDataManager.getFactorsByRepresentationId(this.representationId));
} catch(Exception ex) {
throw new DatasetExporterException("Error with getting factors of study - " + name
+ ", representation - " + this.representationId, ex);
}
int factorRowIndex = factorRowHeaderIndex + 1;
for(Factor factor : factors) {
String dataType = factor.getDataType();
String factorName = factor.getName();
if(factorName != null) {
factorName = factorName.trim();
}
//check if factor is already written as a condition
Integer temp = columnsMap.get(factorName);
if(temp == null && !factorName.equals("STUDY")) {
String traitScaleMethodInfo[] = getTraitScaleMethodInfo(factor.getTraitId(), factor.getScaleId(), factor.getMethodId());
String factorLabel = "";
try {
factorLabel = this.studyDataManager.getMainLabelOfFactorByFactorId(factor.getFactorId());
} catch (MiddlewareQueryException ex) {
factorLabel = "";
}
Row factorRow = descriptionSheet.createRow(factorRowIndex);
factorRow.createCell(0).setCellValue(factorName);
factorRow.createCell(1).setCellValue(traitScaleMethodInfo[0]);
factorRow.createCell(2).setCellValue(traitScaleMethodInfo[1]);
factorRow.createCell(3).setCellValue(traitScaleMethodInfo[2]);
factorRow.createCell(4).setCellValue(traitScaleMethodInfo[3]);
factorRow.createCell(5).setCellValue(dataType);
factorRow.createCell(6).setCellValue("");
factorRow.createCell(7).setCellValue(factorLabel);
//add entry to columns mapping
columnsMap.put(factorName, Integer.valueOf(observationSheetColumnIndex));
observationSheetColumnIndex++;
factorRowIndex++;
}
}
//empty row
Row emptyRowBeforeVariate = descriptionSheet.createRow(factorRowIndex);
//row with headings for variate list
int variateHeaderRowIndex = factorRowIndex + 1;
Row variateHeaderRow = descriptionSheet.createRow(variateHeaderRowIndex);
variateHeaderRow.createCell(0).setCellValue("VARIATE");
variateHeaderRow.createCell(1).setCellValue("DESCRIPTION");
variateHeaderRow.createCell(2).setCellValue("PROPERTY");
variateHeaderRow.createCell(3).setCellValue("SCALE");
variateHeaderRow.createCell(4).setCellValue("METHOD");
variateHeaderRow.createCell(5).setCellValue("DATA TYPE");
//get the variates and their details
List<Variate> variates = new ArrayList<Variate>();
try {
variates.addAll(this.studyDataManager.getVariatesByRepresentationId(this.representationId));
}
catch(Exception ex) {
throw new DatasetExporterException("Error with getting variates of study - " + name
+ ", representation - " + this.representationId, ex);
}
int variateRowIndex = variateHeaderRowIndex + 1;
for(Variate variate : variates) {
String dataType = variate.getDataType();
String variateName = variate.getName();
if(variateName != null) {
variateName = variateName.trim();
}
String traitScaleMethodInfo[] = getTraitScaleMethodInfo(variate.getTraitId(), variate.getScaleId(), variate.getMethodId());
Row variateRow = descriptionSheet.createRow(variateRowIndex);
variateRow.createCell(0).setCellValue(variateName);
variateRow.createCell(1).setCellValue(traitScaleMethodInfo[0]);
variateRow.createCell(2).setCellValue(traitScaleMethodInfo[1]);
variateRow.createCell(3).setCellValue(traitScaleMethodInfo[2]);
variateRow.createCell(4).setCellValue(traitScaleMethodInfo[3]);
variateRow.createCell(5).setCellValue(dataType);
//add entry to columns mapping
columnsMap.put(variateName, Integer.valueOf(observationSheetColumnIndex));
observationSheetColumnIndex++;
variateRowIndex++;
}
//populate the measurements sheet
//establish the columns of the dataset first
Row datasetHeaderRow = observationSheet.createRow(0);
for(String columnName : columnsMap.keySet()) {
short columnIndex = columnsMap.get(columnName).shortValue();
if(columnIndex >= 0) {
Cell cell = PoiUtil.createCell(cellStyleForObservationSheet, datasetHeaderRow, columnIndex, CellStyle.ALIGN_CENTER, CellStyle.ALIGN_CENTER);
cell.setCellValue(columnName);
}
}
//then work with the data
//do it by 50 rows at a time
int pageSize = 50;
long totalNumberOfRows = 0;
int sheetRowIndex = 1;
try {
totalNumberOfRows = this.studyDataManager.countOunitIDsByRepresentationId(this.representationId);
} catch(Exception ex) {
throw new DatasetExporterException("Error with getting count of ounit ids for study - " + name
+ ", representation - " + this.representationId, ex);
}
for(int start = 0; start < totalNumberOfRows; start = start + pageSize) {
List<Integer> ounitIds = new ArrayList<Integer>();
try {
//first get the ounit ids, these are the ids of the rows in the dataset
ounitIds.addAll(this.studyDataManager.getOunitIDsByRepresentationId(this.representationId, start, pageSize));
} catch(Exception ex) {
throw new DatasetExporterException("Error with getting ounit ids of study - " + name
+ ", representation - " + this.representationId, ex);
}
if(!ounitIds.isEmpty()) {
//map each ounit id into a row in the observation sheet
Map<Integer, Row> rowMap = new HashMap<Integer, Row>();
for(Integer ounitId : ounitIds) {
Row row = observationSheet.createRow(sheetRowIndex);
sheetRowIndex++;
rowMap.put(ounitId, row);
}
//then get the data for each of the observation units (ounits)
List<CharacterLevelElement> charLevels = new ArrayList<CharacterLevelElement>();
try {
charLevels.addAll(this.studyDataManager.getCharacterLevelValuesByOunitIdList(ounitIds));
} catch(Exception ex) {
throw new DatasetExporterException("Error with getting character level values of study - " + name
+ ", representation - " + this.representationId, ex);
}
for(CharacterLevelElement elem : charLevels) {
String factorName = elem.getFactorName();
if(factorName != null) {
factorName = factorName.trim();
}
if(!factorName.equals("STUDY")) {
Row row = rowMap.get(elem.getOunitId());
if(row != null) {
short columnIndex = columnsMap.get(factorName).shortValue();
if(columnIndex >= 0) {
Cell cell = PoiUtil.createCell(cellStyleForObservationSheet, row, columnIndex, CellStyle.ALIGN_CENTER, CellStyle.ALIGN_CENTER);
String value = elem.getValue();
if(value != null) {
value = value.trim();
}
cell.setCellValue(value);
}
}
}
}
List<NumericLevelElement> numericLevels = new ArrayList<NumericLevelElement>();
try {
numericLevels.addAll(this.studyDataManager.getNumericLevelValuesByOunitIdList(ounitIds));
} catch(Exception ex) {
throw new DatasetExporterException("Error with getting numeric level values of study - " + name
+ ", representation - " + this.representationId, ex);
}
for(NumericLevelElement elem : numericLevels) {
String factorName = elem.getFactorName();
if(factorName != null) {
factorName = factorName.trim();
}
if(!factorName.equals("STUDY")) {
Row row = rowMap.get(elem.getOunitId());
if(row != null) {
short columnIndex = columnsMap.get(factorName).shortValue();
if(columnIndex >= 0) {
Cell cell = PoiUtil.createCell(cellStyleForObservationSheet, row, columnIndex, CellStyle.ALIGN_CENTER, CellStyle.ALIGN_CENTER);
cell.setCellValue(elem.getValue());
}
}
}
}
List<CharacterDataElement> charDatas = new ArrayList<CharacterDataElement>();
try {
charDatas.addAll(this.studyDataManager.getCharacterDataValuesByOunitIdList(ounitIds));
} catch(Exception ex) {
throw new DatasetExporterException("Error with getting character data values of study - " + name
+ ", representation - " + this.representationId, ex);
}
for(CharacterDataElement elem : charDatas) {
Row row = rowMap.get(elem.getOunitId());
if(row != null) {
String variateName = elem.getVariateName();
if(variateName != null) {
variateName = variateName.trim();
}
short columnIndex = columnsMap.get(variateName).shortValue();
Cell cell = PoiUtil.createCell(cellStyleForObservationSheet, row, columnIndex, CellStyle.ALIGN_CENTER, CellStyle.ALIGN_CENTER);
String value = elem.getValue();
if(value != null) {
value = value.trim();
}
cell.setCellValue(value);
}
}
List<NumericDataElement> numericDatas = new ArrayList<NumericDataElement>();
try {
numericDatas.addAll(this.studyDataManager.getNumericDataValuesByOunitIdList(ounitIds));
} catch(Exception ex) {
throw new DatasetExporterException("Error with getting numeric data values of study - " + name
+ ", representation - " + this.representationId, ex);
}
for(NumericDataElement elem : numericDatas) {
Row row = rowMap.get(elem.getOunitId());
if(row != null) {
String variateName = elem.getVariateName();
if(variateName != null) {
variateName = variateName.trim();
}
short columnIndex = columnsMap.get(variateName).shortValue();
Cell cell = PoiUtil.createCell(cellStyleForObservationSheet, row, columnIndex, CellStyle.ALIGN_CENTER, CellStyle.ALIGN_CENTER);
cell.setCellValue(elem.getValue());
}
}
}
}
}
//adjust column widths of description sheet to fit contents
for(int ctr = 0; ctr < 8; ctr++) {
if(ctr != 1) {
descriptionSheet.autoSizeColumn(ctr);
}
}
//adjust column widths of observation sheet to fit contents
for(int ctr = 0; ctr < observationSheetColumnIndex; ctr++) {
observationSheet.autoSizeColumn(ctr);
}
try {
//write the excel file
FileOutputStream fileOutputStream = new FileOutputStream(filename);
workbook.write(fileOutputStream);
fileOutputStream.close();
return fileOutputStream;
} catch(Exception ex) {
throw new DatasetExporterException("Error with writing to: " + filename, ex);
}
}
/**
 * Exports this study's dataset to a Fieldbook-format Excel (XLS) workbook with two
 * sheets: "Description" (study details plus the condition, factor and variate lists)
 * and "Observation" (one row per observation unit, one column per factor/variate).
 *
 * @param filename path of the XLS file to write
 * @return the FileOutputStream the workbook was written to (already closed)
 * @throws DatasetExporterException if a middleware query or the file write fails
 */
| public FileOutputStream exportToFieldBookExcel(String filename) throws DatasetExporterException {
    //create workbook
    Workbook workbook = new HSSFWorkbook();
    CellStyle cellStyle = workbook.createCellStyle();
    CellStyle cellStyleForObservationSheet = workbook.createCellStyle();
    //create two sheets, one for description and another for measurements
    Sheet descriptionSheet = workbook.createSheet("Description");
    Sheet observationSheet = workbook.createSheet("Observation");
    //this map is for mapping the column names of the dataset to their column index in
    //the excel sheet; a value of -1 marks a condition-only entry that must not become
    //an observation-sheet column
    Map<String, Integer> columnsMap = new HashMap<String, Integer>();
    int observationSheetColumnIndex = 0;
    //write the details on the first sheet - description
    //get the study first
    Study study = null;
    try {
        study = this.studyDataManager.getStudyByID(this.studyId);
    } catch (MiddlewareQueryException ex) {
        throw new DatasetExporterException("Error with getting Study with id: " + this.studyId, ex);
    }
    if(study != null) {
        //get the needed study details
        String name = study.getName();
        String title = study.getTitle();
        Integer pmkey = study.getProjectKey();
        String objective = study.getObjective();
        Integer startDate = study.getStartDate();
        Integer endDate = study.getEndDate();
        String type = study.getType();
        //add the study details to the top of the description sheet
        Row row0 = descriptionSheet.createRow(0);
        row0.createCell(0).setCellValue("STUDY");
        row0.createCell(1).setCellValue(name);
        Row row1 = descriptionSheet.createRow(1);
        row1.createCell(0).setCellValue("TITLE");
        row1.createCell(1).setCellValue(title);
        Row row2 = descriptionSheet.createRow(2);
        row2.createCell(0).setCellValue("PMKEY");
        Cell pmKeyCell = PoiUtil.createCell(cellStyle, row2, (short) 1, CellStyle.ALIGN_LEFT, CellStyle.ALIGN_JUSTIFY);
        //NOTE(review): setCellValue auto-unboxes pmkey/startDate/endDate; a null value
        //would throw a NullPointerException here - confirm these are always non-null
        pmKeyCell.setCellValue(pmkey);
        Row row3 = descriptionSheet.createRow(3);
        row3.createCell(0).setCellValue("OBJECTIVE");
        row3.createCell(1).setCellValue(objective);
        Row row4 = descriptionSheet.createRow(4);
        row4.createCell(0).setCellValue("START DATE");
        Cell startDateCell = PoiUtil.createCell(cellStyle, row4, (short) 1, CellStyle.ALIGN_LEFT, CellStyle.ALIGN_JUSTIFY);
        startDateCell.setCellValue(startDate);
        Row row5 = descriptionSheet.createRow(5);
        row5.createCell(0).setCellValue("END DATE");
        Cell endDateCell = PoiUtil.createCell(cellStyle, row5, (short) 1, CellStyle.ALIGN_LEFT, CellStyle.ALIGN_JUSTIFY);
        endDateCell.setCellValue(endDate);
        Row row6 = descriptionSheet.createRow(6);
        row6.createCell(0).setCellValue("STUDY TYPE");
        row6.createCell(1).setCellValue(type);
        //merge cells for the study details
        for(int ctr = 0; ctr < 7; ctr++) {
            descriptionSheet.addMergedRegion(new CellRangeAddress(ctr, ctr, 1, 7));
        }
        //empty spacer row
        Row row7 = descriptionSheet.createRow(7);
        //row with headings for condition list
        Row conditionHeaderRow = descriptionSheet.createRow(this.conditionListHeaderRowIndex);
        conditionHeaderRow.createCell(0).setCellValue("CONDITION");
        conditionHeaderRow.createCell(1).setCellValue("DESCRIPTION");
        conditionHeaderRow.createCell(2).setCellValue("PROPERTY");
        conditionHeaderRow.createCell(3).setCellValue("SCALE");
        conditionHeaderRow.createCell(4).setCellValue("METHOD");
        conditionHeaderRow.createCell(5).setCellValue("DATA TYPE");
        conditionHeaderRow.createCell(6).setCellValue("VALUE");
        conditionHeaderRow.createCell(7).setCellValue("LABEL");
        //get the conditions and their details
        List<DatasetCondition> conditions = new ArrayList<DatasetCondition>();
        try {
            conditions.addAll(this.studyDataManager.getConditionsByRepresentationId(this.representationId));
        } catch(Exception ex) {
            throw new DatasetExporterException("Error with getting conditions of study - " + name
                    + ", representation - " + this.representationId, ex);
        }
        int conditionRowIndex = this.conditionListHeaderRowIndex + 1;
        for(DatasetCondition condition : conditions) {
            String traitScaleMethodInfo[] = getTraitScaleMethodInfo(condition.getTraitId(), condition.getScaleId(), condition.getMethodId());
            String conditionName = condition.getName();
            if(conditionName != null) {
                conditionName = conditionName.trim();
            }
            String conditionType = condition.getType();
            String conditionLabel = "";
            try {
                conditionLabel = this.studyDataManager.getMainLabelOfFactorByFactorId(condition.getFactorId());
            } catch (MiddlewareQueryException ex) {
                //a missing label is not fatal; leave it blank
                conditionLabel = "";
            }
            Row conditionRow = descriptionSheet.createRow(conditionRowIndex);
            conditionRow.createCell(0).setCellValue(conditionName);
            conditionRow.createCell(1).setCellValue(traitScaleMethodInfo[0]);
            conditionRow.createCell(2).setCellValue(traitScaleMethodInfo[1]);
            conditionRow.createCell(3).setCellValue(traitScaleMethodInfo[2]);
            conditionRow.createCell(4).setCellValue(traitScaleMethodInfo[3]);
            conditionRow.createCell(5).setCellValue(conditionType);
            //"N" marks a numeric condition; everything else is written as text
            if(conditionType.equals("N")) {
                Double thevalue = (Double) condition.getValue();
                conditionRow.createCell(6).setCellValue(thevalue);
            } else {
                conditionRow.createCell(6).setCellValue(condition.getValue().toString());
            }
            conditionRow.createCell(7).setCellValue(conditionLabel);
            //add entry to columns mapping
            //we set the value to -1 to signify that this should not be a column in the observation sheet
            if(!conditionName.equals("STUDY")) {
                columnsMap.put(conditionName, Integer.valueOf(-1));
            }
            conditionRowIndex++;
        }
        //empty spacer row
        Row emptyRowBeforeFactors = descriptionSheet.createRow(conditionRowIndex);
        //row with headings for factor list
        int factorRowHeaderIndex = conditionRowIndex + 1;
        Row factorHeaderRow = descriptionSheet.createRow(factorRowHeaderIndex);
        factorHeaderRow.createCell(0).setCellValue("FACTOR");
        factorHeaderRow.createCell(1).setCellValue("DESCRIPTION");
        factorHeaderRow.createCell(2).setCellValue("PROPERTY");
        factorHeaderRow.createCell(3).setCellValue("SCALE");
        factorHeaderRow.createCell(4).setCellValue("METHOD");
        factorHeaderRow.createCell(5).setCellValue("DATA TYPE");
        factorHeaderRow.createCell(6).setCellValue("");
        factorHeaderRow.createCell(7).setCellValue("LABEL");
        //get the factors and their details
        List<Factor> factors = new ArrayList<Factor>();
        try {
            factors.addAll(this.studyDataManager.getFactorsByRepresentationId(this.representationId));
        } catch(Exception ex) {
            throw new DatasetExporterException("Error with getting factors of study - " + name
                    + ", representation - " + this.representationId, ex);
        }
        int factorRowIndex = factorRowHeaderIndex + 1;
        for(Factor factor : factors) {
            String dataType = factor.getDataType();
            String factorName = factor.getName();
            if(factorName != null) {
                factorName = factorName.trim();
            }
            //check if factor is already written as a condition
            Integer temp = columnsMap.get(factorName);
            if(temp == null && !factorName.equals("STUDY")) {
                String traitScaleMethodInfo[] = getTraitScaleMethodInfo(factor.getTraitId(), factor.getScaleId(), factor.getMethodId());
                String factorLabel = "";
                try {
                    factorLabel = this.studyDataManager.getMainLabelOfFactorByFactorId(factor.getFactorId());
                } catch (MiddlewareQueryException ex) {
                    //a missing label is not fatal; leave it blank
                    factorLabel = "";
                }
                Row factorRow = descriptionSheet.createRow(factorRowIndex);
                factorRow.createCell(0).setCellValue(factorName);
                factorRow.createCell(1).setCellValue(traitScaleMethodInfo[0]);
                factorRow.createCell(2).setCellValue(traitScaleMethodInfo[1]);
                factorRow.createCell(3).setCellValue(traitScaleMethodInfo[2]);
                factorRow.createCell(4).setCellValue(traitScaleMethodInfo[3]);
                factorRow.createCell(5).setCellValue(dataType);
                factorRow.createCell(6).setCellValue("");
                factorRow.createCell(7).setCellValue(factorLabel);
                //add entry to columns mapping
                columnsMap.put(factorName, Integer.valueOf(observationSheetColumnIndex));
                observationSheetColumnIndex++;
                factorRowIndex++;
            }
        }
        //empty spacer row
        Row emptyRowBeforeVariate = descriptionSheet.createRow(factorRowIndex);
        //row with headings for variate list
        int variateHeaderRowIndex = factorRowIndex + 1;
        Row variateHeaderRow = descriptionSheet.createRow(variateHeaderRowIndex);
        variateHeaderRow.createCell(0).setCellValue("VARIATE");
        variateHeaderRow.createCell(1).setCellValue("DESCRIPTION");
        variateHeaderRow.createCell(2).setCellValue("PROPERTY");
        variateHeaderRow.createCell(3).setCellValue("SCALE");
        variateHeaderRow.createCell(4).setCellValue("METHOD");
        variateHeaderRow.createCell(5).setCellValue("DATA TYPE");
        //get the variates and their details
        List<Variate> variates = new ArrayList<Variate>();
        try {
            variates.addAll(this.studyDataManager.getVariatesByRepresentationId(this.representationId));
        }
        catch(Exception ex) {
            throw new DatasetExporterException("Error with getting variates of study - " + name
                    + ", representation - " + this.representationId, ex);
        }
        int variateRowIndex = variateHeaderRowIndex + 1;
        for(Variate variate : variates) {
            String dataType = variate.getDataType();
            String variateName = variate.getName();
            if(variateName != null) {
                variateName = variateName.trim();
            }
            String traitScaleMethodInfo[] = getTraitScaleMethodInfo(variate.getTraitId(), variate.getScaleId(), variate.getMethodId());
            Row variateRow = descriptionSheet.createRow(variateRowIndex);
            variateRow.createCell(0).setCellValue(variateName);
            variateRow.createCell(1).setCellValue(traitScaleMethodInfo[0]);
            variateRow.createCell(2).setCellValue(traitScaleMethodInfo[1]);
            variateRow.createCell(3).setCellValue(traitScaleMethodInfo[2]);
            variateRow.createCell(4).setCellValue(traitScaleMethodInfo[3]);
            variateRow.createCell(5).setCellValue(dataType);
            //add entry to columns mapping
            columnsMap.put(variateName, Integer.valueOf(observationSheetColumnIndex));
            observationSheetColumnIndex++;
            variateRowIndex++;
        }
        //populate the measurements sheet
        //establish the columns of the dataset first
        Row datasetHeaderRow = observationSheet.createRow(0);
        for(String columnName : columnsMap.keySet()) {
            short columnIndex = columnsMap.get(columnName).shortValue();
            if(columnIndex >= 0) {
                Cell cell = PoiUtil.createCell(cellStyleForObservationSheet, datasetHeaderRow, columnIndex, CellStyle.ALIGN_CENTER, CellStyle.ALIGN_CENTER);
                cell.setCellValue(columnName);
            }
        }
        //then work with the data
        //do it by 50 rows at a time
        int pageSize = 50;
        long totalNumberOfRows = 0;
        int sheetRowIndex = 1;
        try {
            totalNumberOfRows = this.studyDataManager.countOunitIDsByRepresentationId(this.representationId);
        } catch(Exception ex) {
            throw new DatasetExporterException("Error with getting count of ounit ids for study - " + name
                    + ", representation - " + this.representationId, ex);
        }
        for(int start = 0; start < totalNumberOfRows; start = start + pageSize) {
            List<Integer> ounitIds = new ArrayList<Integer>();
            try {
                //first get the ounit ids, these are the ids of the rows in the dataset
                ounitIds.addAll(this.studyDataManager.getOunitIDsByRepresentationId(this.representationId, start, pageSize));
            } catch(Exception ex) {
                throw new DatasetExporterException("Error with getting ounit ids of study - " + name
                        + ", representation - " + this.representationId, ex);
            }
            if(!ounitIds.isEmpty()) {
                //map each ounit id into a row in the observation sheet
                Map<Integer, Row> rowMap = new HashMap<Integer, Row>();
                for(Integer ounitId : ounitIds) {
                    Row row = observationSheet.createRow(sheetRowIndex);
                    sheetRowIndex++;
                    rowMap.put(ounitId, row);
                }
                //then get the data for each of the observation units (ounits)
                List<CharacterLevelElement> charLevels = new ArrayList<CharacterLevelElement>();
                try {
                    charLevels.addAll(this.studyDataManager.getCharacterLevelValuesByOunitIdList(ounitIds));
                } catch(Exception ex) {
                    throw new DatasetExporterException("Error with getting character level values of study - " + name
                            + ", representation - " + this.representationId, ex);
                }
                for(CharacterLevelElement elem : charLevels) {
                    String factorName = elem.getFactorName();
                    if(factorName != null) {
                        factorName = factorName.trim();
                    }
                    if(!factorName.equals("STUDY")) {
                        Row row = rowMap.get(elem.getOunitId());
                        if(row != null) {
                            short columnIndex = columnsMap.get(factorName).shortValue();
                            if(columnIndex >= 0) {
                                Cell cell = PoiUtil.createCell(cellStyleForObservationSheet, row, columnIndex, CellStyle.ALIGN_CENTER, CellStyle.ALIGN_CENTER);
                                String value = elem.getValue();
                                if(value != null) {
                                    value = value.trim();
                                }
                                cell.setCellValue(value);
                            }
                        }
                    }
                }
                List<NumericLevelElement> numericLevels = new ArrayList<NumericLevelElement>();
                try {
                    numericLevels.addAll(this.studyDataManager.getNumericLevelValuesByOunitIdList(ounitIds));
                } catch(Exception ex) {
                    throw new DatasetExporterException("Error with getting numeric level values of study - " + name
                            + ", representation - " + this.representationId, ex);
                }
                for(NumericLevelElement elem : numericLevels) {
                    String factorName = elem.getFactorName();
                    if(factorName != null) {
                        factorName = factorName.trim();
                    }
                    if(!factorName.equals("STUDY")) {
                        Row row = rowMap.get(elem.getOunitId());
                        if(row != null) {
                            short columnIndex = columnsMap.get(factorName).shortValue();
                            if(columnIndex >= 0) {
                                Cell cell = PoiUtil.createCell(cellStyleForObservationSheet, row, columnIndex, CellStyle.ALIGN_CENTER, CellStyle.ALIGN_CENTER);
                                //null guard: setCellValue(double) would auto-unbox a
                                //null Double and throw NPE; missing values become 0
                                double elemValue = 0;
                                if(elem.getValue() != null){
                                    elemValue = elem.getValue().doubleValue();
                                }
                                cell.setCellValue(elemValue);
                            }
                        }
                    }
                }
                List<CharacterDataElement> charDatas = new ArrayList<CharacterDataElement>();
                try {
                    charDatas.addAll(this.studyDataManager.getCharacterDataValuesByOunitIdList(ounitIds));
                } catch(Exception ex) {
                    throw new DatasetExporterException("Error with getting character data values of study - " + name
                            + ", representation - " + this.representationId, ex);
                }
                for(CharacterDataElement elem : charDatas) {
                    Row row = rowMap.get(elem.getOunitId());
                    if(row != null) {
                        String variateName = elem.getVariateName();
                        if(variateName != null) {
                            variateName = variateName.trim();
                        }
                        short columnIndex = columnsMap.get(variateName).shortValue();
                        Cell cell = PoiUtil.createCell(cellStyleForObservationSheet, row, columnIndex, CellStyle.ALIGN_CENTER, CellStyle.ALIGN_CENTER);
                        String value = elem.getValue();
                        if(value != null) {
                            value = value.trim();
                        }
                        cell.setCellValue(value);
                    }
                }
                List<NumericDataElement> numericDatas = new ArrayList<NumericDataElement>();
                try {
                    numericDatas.addAll(this.studyDataManager.getNumericDataValuesByOunitIdList(ounitIds));
                } catch(Exception ex) {
                    throw new DatasetExporterException("Error with getting numeric data values of study - " + name
                            + ", representation - " + this.representationId, ex);
                }
                for(NumericDataElement elem : numericDatas) {
                    Row row = rowMap.get(elem.getOunitId());
                    if(row != null) {
                        String variateName = elem.getVariateName();
                        if(variateName != null) {
                            variateName = variateName.trim();
                        }
                        short columnIndex = columnsMap.get(variateName).shortValue();
                        Cell cell = PoiUtil.createCell(cellStyleForObservationSheet, row, columnIndex, CellStyle.ALIGN_CENTER, CellStyle.ALIGN_CENTER);
                        //null guard: same auto-unboxing protection as above
                        double elemValue = 0;
                        if(elem.getValue() != null){
                            elemValue = elem.getValue().doubleValue();
                        }
                        cell.setCellValue(elemValue);
                    }
                }
            }
        }
    }
    //adjust column widths of description sheet to fit contents
    //(column 1 is skipped because it spans the merged study-detail region)
    for(int ctr = 0; ctr < 8; ctr++) {
        if(ctr != 1) {
            descriptionSheet.autoSizeColumn(ctr);
        }
    }
    //adjust column widths of observation sheet to fit contents
    for(int ctr = 0; ctr < observationSheetColumnIndex; ctr++) {
        observationSheet.autoSizeColumn(ctr);
    }
    try {
        //write the excel file
        FileOutputStream fileOutputStream = new FileOutputStream(filename);
        workbook.write(fileOutputStream);
        fileOutputStream.close();
        return fileOutputStream;
    } catch(Exception ex) {
        throw new DatasetExporterException("Error with writing to: " + filename, ex);
    }
}
|
diff --git a/nuget-server/src/jetbrains/buildServer/nuget/server/trigger/NamedPackagesUpdateChecker.java b/nuget-server/src/jetbrains/buildServer/nuget/server/trigger/NamedPackagesUpdateChecker.java
index a15563e9..2c37592c 100644
--- a/nuget-server/src/jetbrains/buildServer/nuget/server/trigger/NamedPackagesUpdateChecker.java
+++ b/nuget-server/src/jetbrains/buildServer/nuget/server/trigger/NamedPackagesUpdateChecker.java
@@ -1,95 +1,95 @@
/*
* Copyright 2000-2011 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package jetbrains.buildServer.nuget.server.trigger;
import com.intellij.openapi.diagnostic.Logger;
import jetbrains.buildServer.buildTriggers.BuildTriggerDescriptor;
import jetbrains.buildServer.buildTriggers.BuildTriggerException;
import jetbrains.buildServer.nuget.server.trigger.impl.*;
import jetbrains.buildServer.serverSide.CustomDataStorage;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
/**
* Created by Eugene Petrenko ([email protected])
* Date: 14.07.11 15:41
*/
public class NamedPackagesUpdateChecker implements TriggerUpdateChecker {
private static final Logger LOG = Logger.getInstance(NamedPackagesUpdateChecker.class.getName());
public static final String KEY = "hash";
private final PackageChangesManager myPackageChangesManager;
private final TriggerRequestFactory myRequestFactory;
private final PackagesHashCalculator myCalculator;
public NamedPackagesUpdateChecker(@NotNull final PackageChangesManager packageChangesManager,
@NotNull final TriggerRequestFactory requestFactory,
@NotNull final PackagesHashCalculator calculator) {
myPackageChangesManager = packageChangesManager;
myRequestFactory = requestFactory;
myCalculator = calculator;
}
@Nullable
public BuildStartReason checkChanges(@NotNull BuildTriggerDescriptor descriptor,
@NotNull CustomDataStorage storage) throws BuildTriggerException {
final PackageCheckRequest checkRequest = myRequestFactory.createRequest(descriptor);
CheckResult result;
try {
result = myPackageChangesManager.checkPackage(checkRequest);
//no change available
} catch (Throwable t) {
LOG.warn("Failed to ckeck changes for package: " + checkRequest.getPackage().getPackageId() + ". " + t.getMessage(), t);
result = CheckResult.failed(t.getMessage());
}
if (result == null) return null;
final String error = result.getError();
if (error != null) {
throw new BuildTriggerException("Failed to check for package versions. " + error);
}
@NotNull final String newHash = myCalculator.serializeHashcode(result.getInfos());
@Nullable final String oldHash = storage.getValue(KEY);
if (LOG.isDebugEnabled()) {
LOG.debug("Package: " + checkRequest.getPackage().toString());
LOG.debug("Recieved packages hash: " + newHash);
LOG.debug(" old hash was: " + oldHash);
}
- if (!newHash.equals(oldHash)) {
+ if (oldHash == null || (!newHash.equals(oldHash) && !newHash.equals("v2"))) {
storage.putValue(KEY, newHash);
storage.flush();
}
//empty feed is error, not a trigger event,
//still, we update trigger state for that
if (result.getInfos().isEmpty()) {
throw new BuildTriggerException("Failed to check for package versions. Package " + checkRequest.getPackage().getPackageId() + " was not found in the feed");
}
if (myCalculator.isUpgradeRequired(oldHash, newHash)) {
return new BuildStartReason("NuGet Package " + checkRequest.getPackage().getPackageId() + " updated");
}
return null;
}
}
| true | true | public BuildStartReason checkChanges(@NotNull BuildTriggerDescriptor descriptor,
@NotNull CustomDataStorage storage) throws BuildTriggerException {
final PackageCheckRequest checkRequest = myRequestFactory.createRequest(descriptor);
CheckResult result;
try {
result = myPackageChangesManager.checkPackage(checkRequest);
//no change available
} catch (Throwable t) {
LOG.warn("Failed to ckeck changes for package: " + checkRequest.getPackage().getPackageId() + ". " + t.getMessage(), t);
result = CheckResult.failed(t.getMessage());
}
if (result == null) return null;
final String error = result.getError();
if (error != null) {
throw new BuildTriggerException("Failed to check for package versions. " + error);
}
@NotNull final String newHash = myCalculator.serializeHashcode(result.getInfos());
@Nullable final String oldHash = storage.getValue(KEY);
if (LOG.isDebugEnabled()) {
LOG.debug("Package: " + checkRequest.getPackage().toString());
LOG.debug("Recieved packages hash: " + newHash);
LOG.debug(" old hash was: " + oldHash);
}
if (!newHash.equals(oldHash)) {
storage.putValue(KEY, newHash);
storage.flush();
}
//empty feed is error, not a trigger event,
//still, we update trigger state for that
if (result.getInfos().isEmpty()) {
throw new BuildTriggerException("Failed to check for package versions. Package " + checkRequest.getPackage().getPackageId() + " was not found in the feed");
}
if (myCalculator.isUpgradeRequired(oldHash, newHash)) {
return new BuildStartReason("NuGet Package " + checkRequest.getPackage().getPackageId() + " updated");
}
return null;
}
| public BuildStartReason checkChanges(@NotNull BuildTriggerDescriptor descriptor,
@NotNull CustomDataStorage storage) throws BuildTriggerException {
final PackageCheckRequest checkRequest = myRequestFactory.createRequest(descriptor);
CheckResult result;
try {
result = myPackageChangesManager.checkPackage(checkRequest);
//no change available
} catch (Throwable t) {
LOG.warn("Failed to ckeck changes for package: " + checkRequest.getPackage().getPackageId() + ". " + t.getMessage(), t);
result = CheckResult.failed(t.getMessage());
}
if (result == null) return null;
final String error = result.getError();
if (error != null) {
throw new BuildTriggerException("Failed to check for package versions. " + error);
}
@NotNull final String newHash = myCalculator.serializeHashcode(result.getInfos());
@Nullable final String oldHash = storage.getValue(KEY);
if (LOG.isDebugEnabled()) {
LOG.debug("Package: " + checkRequest.getPackage().toString());
LOG.debug("Recieved packages hash: " + newHash);
LOG.debug(" old hash was: " + oldHash);
}
if (oldHash == null || (!newHash.equals(oldHash) && !newHash.equals("v2"))) {
storage.putValue(KEY, newHash);
storage.flush();
}
//empty feed is error, not a trigger event,
//still, we update trigger state for that
if (result.getInfos().isEmpty()) {
throw new BuildTriggerException("Failed to check for package versions. Package " + checkRequest.getPackage().getPackageId() + " was not found in the feed");
}
if (myCalculator.isUpgradeRequired(oldHash, newHash)) {
return new BuildStartReason("NuGet Package " + checkRequest.getPackage().getPackageId() + " updated");
}
return null;
}
|
diff --git a/liquibase-core/src/main/java/liquibase/sqlgenerator/core/AddForeignKeyConstraintGenerator.java b/liquibase-core/src/main/java/liquibase/sqlgenerator/core/AddForeignKeyConstraintGenerator.java
index 8feab424..d7a7ba9e 100644
--- a/liquibase-core/src/main/java/liquibase/sqlgenerator/core/AddForeignKeyConstraintGenerator.java
+++ b/liquibase-core/src/main/java/liquibase/sqlgenerator/core/AddForeignKeyConstraintGenerator.java
@@ -1,95 +1,95 @@
package liquibase.sqlgenerator.core;
import liquibase.database.Database;
import liquibase.database.core.InformixDatabase;
import liquibase.database.core.SQLiteDatabase;
import liquibase.database.core.OracleDatabase;
import liquibase.exception.ValidationErrors;
import liquibase.sql.Sql;
import liquibase.sql.UnparsedSql;
import liquibase.sqlgenerator.SqlGenerator;
import liquibase.sqlgenerator.SqlGeneratorChain;
import liquibase.statement.core.AddForeignKeyConstraintStatement;
public class AddForeignKeyConstraintGenerator implements SqlGenerator<AddForeignKeyConstraintStatement> {
public int getPriority() {
return PRIORITY_DEFAULT;
}
public boolean supports(AddForeignKeyConstraintStatement statement, Database database) {
return (!(database instanceof SQLiteDatabase));
}
public ValidationErrors validate(AddForeignKeyConstraintStatement addForeignKeyConstraintStatement, Database database, SqlGeneratorChain sqlGeneratorChain) {
ValidationErrors validationErrors = new ValidationErrors();
if ((addForeignKeyConstraintStatement.isInitiallyDeferred() || addForeignKeyConstraintStatement.isDeferrable()) && !database.supportsInitiallyDeferrableColumns()) {
validationErrors.checkDisallowedField("initiallyDeferred", addForeignKeyConstraintStatement.isInitiallyDeferred(), database);
validationErrors.checkDisallowedField("deferrable", addForeignKeyConstraintStatement.isDeferrable(), database);
}
validationErrors.checkRequiredField("baseColumnNames", addForeignKeyConstraintStatement.getBaseColumnNames());
validationErrors.checkRequiredField("baseTableNames", addForeignKeyConstraintStatement.getBaseTableName());
validationErrors.checkRequiredField("referencedColumnNames", addForeignKeyConstraintStatement.getReferencedColumnNames());
validationErrors.checkRequiredField("referencedTableName", addForeignKeyConstraintStatement.getReferencedTableName());
return validationErrors;
}
public Sql[] generateSql(AddForeignKeyConstraintStatement statement, Database database, SqlGeneratorChain sqlGeneratorChain) {
// If database doesn't support FK referenced on unique columns - skip FK statement generation
- if (!statement.isReferencedToPrimary() && !(database instanceof OracleDatabase)) {
+ if (statement.isReferencedToPrimary() == null || (!statement.isReferencedToPrimary() && !(database instanceof OracleDatabase))) {
return new Sql[0];
}
StringBuilder sb = new StringBuilder();
sb.append("ALTER TABLE ")
.append(database.escapeTableName(statement.getBaseTableSchemaName(), statement.getBaseTableName()))
.append(" ADD CONSTRAINT ");
if (!(database instanceof InformixDatabase)) {
sb.append(database.escapeConstraintName(statement.getConstraintName()));
}
sb.append(" FOREIGN KEY (")
.append(database.escapeColumnNameList(statement.getBaseColumnNames()))
.append(") REFERENCES ")
.append(database.escapeTableName(statement.getReferencedTableSchemaName(), statement.getReferencedTableName()))
.append("(")
.append(database.escapeColumnNameList(statement.getReferencedColumnNames()))
.append(")");
if (statement.getOnUpdate() != null) {
if ((database instanceof OracleDatabase) && statement.getOnUpdate().equalsIgnoreCase("RESTRICT")) {
//don't use
} else {
sb.append(" ON UPDATE ").append(statement.getOnUpdate());
}
}
if (statement.getOnDelete() != null) {
if ((database instanceof OracleDatabase) && (statement.getOnDelete().equalsIgnoreCase("RESTRICT") || statement.getOnDelete().equalsIgnoreCase("NO ACTION"))) {
//don't use
} else {
sb.append(" ON DELETE ").append(statement.getOnDelete());
}
}
if (statement.isDeferrable() || statement.isInitiallyDeferred()) {
if (statement.isDeferrable()) {
sb.append(" DEFERRABLE");
}
if (statement.isInitiallyDeferred()) {
sb.append(" INITIALLY DEFERRED");
}
}
if (database instanceof InformixDatabase) {
sb.append(" CONSTRAINT ");
sb.append(database.escapeConstraintName(statement.getConstraintName()));
}
return new Sql[]{
new UnparsedSql(sb.toString())
};
}
}
| true | true | public Sql[] generateSql(AddForeignKeyConstraintStatement statement, Database database, SqlGeneratorChain sqlGeneratorChain) {
// If database doesn't support FK referenced on unique columns - skip FK statement generation
if (!statement.isReferencedToPrimary() && !(database instanceof OracleDatabase)) {
return new Sql[0];
}
StringBuilder sb = new StringBuilder();
sb.append("ALTER TABLE ")
.append(database.escapeTableName(statement.getBaseTableSchemaName(), statement.getBaseTableName()))
.append(" ADD CONSTRAINT ");
if (!(database instanceof InformixDatabase)) {
sb.append(database.escapeConstraintName(statement.getConstraintName()));
}
sb.append(" FOREIGN KEY (")
.append(database.escapeColumnNameList(statement.getBaseColumnNames()))
.append(") REFERENCES ")
.append(database.escapeTableName(statement.getReferencedTableSchemaName(), statement.getReferencedTableName()))
.append("(")
.append(database.escapeColumnNameList(statement.getReferencedColumnNames()))
.append(")");
if (statement.getOnUpdate() != null) {
if ((database instanceof OracleDatabase) && statement.getOnUpdate().equalsIgnoreCase("RESTRICT")) {
//don't use
} else {
sb.append(" ON UPDATE ").append(statement.getOnUpdate());
}
}
if (statement.getOnDelete() != null) {
if ((database instanceof OracleDatabase) && (statement.getOnDelete().equalsIgnoreCase("RESTRICT") || statement.getOnDelete().equalsIgnoreCase("NO ACTION"))) {
//don't use
} else {
sb.append(" ON DELETE ").append(statement.getOnDelete());
}
}
if (statement.isDeferrable() || statement.isInitiallyDeferred()) {
if (statement.isDeferrable()) {
sb.append(" DEFERRABLE");
}
if (statement.isInitiallyDeferred()) {
sb.append(" INITIALLY DEFERRED");
}
}
if (database instanceof InformixDatabase) {
sb.append(" CONSTRAINT ");
sb.append(database.escapeConstraintName(statement.getConstraintName()));
}
return new Sql[]{
new UnparsedSql(sb.toString())
};
}
| public Sql[] generateSql(AddForeignKeyConstraintStatement statement, Database database, SqlGeneratorChain sqlGeneratorChain) {
// If database doesn't support FK referenced on unique columns - skip FK statement generation
if (statement.isReferencedToPrimary() == null || (!statement.isReferencedToPrimary() && !(database instanceof OracleDatabase))) {
return new Sql[0];
}
StringBuilder sb = new StringBuilder();
sb.append("ALTER TABLE ")
.append(database.escapeTableName(statement.getBaseTableSchemaName(), statement.getBaseTableName()))
.append(" ADD CONSTRAINT ");
if (!(database instanceof InformixDatabase)) {
sb.append(database.escapeConstraintName(statement.getConstraintName()));
}
sb.append(" FOREIGN KEY (")
.append(database.escapeColumnNameList(statement.getBaseColumnNames()))
.append(") REFERENCES ")
.append(database.escapeTableName(statement.getReferencedTableSchemaName(), statement.getReferencedTableName()))
.append("(")
.append(database.escapeColumnNameList(statement.getReferencedColumnNames()))
.append(")");
if (statement.getOnUpdate() != null) {
if ((database instanceof OracleDatabase) && statement.getOnUpdate().equalsIgnoreCase("RESTRICT")) {
//don't use
} else {
sb.append(" ON UPDATE ").append(statement.getOnUpdate());
}
}
if (statement.getOnDelete() != null) {
if ((database instanceof OracleDatabase) && (statement.getOnDelete().equalsIgnoreCase("RESTRICT") || statement.getOnDelete().equalsIgnoreCase("NO ACTION"))) {
//don't use
} else {
sb.append(" ON DELETE ").append(statement.getOnDelete());
}
}
if (statement.isDeferrable() || statement.isInitiallyDeferred()) {
if (statement.isDeferrable()) {
sb.append(" DEFERRABLE");
}
if (statement.isInitiallyDeferred()) {
sb.append(" INITIALLY DEFERRED");
}
}
if (database instanceof InformixDatabase) {
sb.append(" CONSTRAINT ");
sb.append(database.escapeConstraintName(statement.getConstraintName()));
}
return new Sql[]{
new UnparsedSql(sb.toString())
};
}
|
diff --git a/src/main/java/org/diyefi/openlogviewer/graphing/SingleGraphPanel.java b/src/main/java/org/diyefi/openlogviewer/graphing/SingleGraphPanel.java
index a5c4555..824ae62 100644
--- a/src/main/java/org/diyefi/openlogviewer/graphing/SingleGraphPanel.java
+++ b/src/main/java/org/diyefi/openlogviewer/graphing/SingleGraphPanel.java
@@ -1,452 +1,454 @@
/* OpenLogViewer
*
* Copyright 2011
*
* This file is part of the OpenLogViewer project.
*
* OpenLogViewer software is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* OpenLogViewer software is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with any OpenLogViewer software. If not, see http://www.gnu.org/licenses/
*
* I ask that if you make any changes to this file you fork the code on github.com!
*
*/
package org.diyefi.openlogviewer.graphing;
import java.awt.Color;
import java.awt.Graphics;
import java.awt.Graphics2D;
import java.awt.event.HierarchyBoundsListener;
import java.awt.event.HierarchyEvent;
import java.beans.PropertyChangeEvent;
import java.beans.PropertyChangeListener;
import javax.swing.JPanel;
import org.diyefi.openlogviewer.OpenLogViewer;
import org.diyefi.openlogviewer.genericlog.GenericDataElement;
import org.diyefi.openlogviewer.utils.MathUtils;
/**
* SingleGraphPanel is a JPanel that uses a transparent background.
* The graph trace is drawn to this panel and used in conjunction with a JLayeredPane
* to give the appearance of all the graph traces drawn together.
*
* This layer listens for window resizes and property changes.
* @author Bryan Harris and Ben Fenner
*/
public class SingleGraphPanel extends JPanel implements HierarchyBoundsListener, PropertyChangeListener {
private static final long serialVersionUID = 1L;
private static final double GRAPH_TRACE_SIZE_AS_PERCENTAGE_OF_TOTAL_GRAPH_SIZE = 0.95;
private GenericDataElement GDE;
private double[] dataPointsToDisplay;
private double[][] dataPointRangeInfo;
private int availableDataRecords;
public SingleGraphPanel() {
this.setOpaque(false);
this.setLayout(null);
this.GDE = null;
dataPointsToDisplay = null;
dataPointRangeInfo = null;
}
@Override
public void ancestorMoved(final HierarchyEvent e) {
}
@Override
public final void ancestorResized(final HierarchyEvent e) {
if (e.getID() == HierarchyEvent.ANCESTOR_RESIZED) {
sizeGraph();
}
}
@Override
public final void propertyChange(final PropertyChangeEvent evt) {
if (evt.getPropertyName().equalsIgnoreCase("Split")) {
sizeGraph();
}
}
@Override
public final void paint(final Graphics g) { // overridden paint because there will be no other painting other than this
boolean zoomedOut = OpenLogViewer.getInstance().getEntireGraphingPanel().isZoomedOutBeyondOneToOne();
if(zoomedOut){
initGraphZoomedOut();
} else{
initGraphZoomed();
}
if (hasDataPointToDisplay()) {
paintDataPointsAndTraces(g);
}
}
private void paintDataPointsAndTraces(final Graphics g) {
// Setup graphics stuff
final Graphics2D g2d = (Graphics2D) g;
g2d.setColor(GDE.getDisplayColor());
// Initialize current, previous and next graph trace data points
double leftOfTraceData = -Double.MAX_VALUE;
double traceData = -Double.MAX_VALUE;
double rightOfTraceData = dataPointsToDisplay[0];
// Initialize graph status markers
boolean atGraphBeginning = false;
boolean insideGraph = false;
boolean atGraphEnd = false;
// Initialize and setup data point screen location stuff
final boolean zoomedOut = OpenLogViewer.getInstance().getEntireGraphingPanel().isZoomedOutBeyondOneToOne();
int zoom = OpenLogViewer.getInstance().getEntireGraphingPanel().getZoom();
if(zoomedOut){
zoom = 1;
}
final double graphPosition = OpenLogViewer.getInstance().getEntireGraphingPanel().getGraphPosition();
final double offset = (graphPosition % 1) * zoom;
int screenPositionXCoord = -(int)Math.round(offset); // Start with one point off-screen to the left
int screenPositionYCoord = Integer.MIN_VALUE;
int nextScreenPositionYCoord = getScreenPositionYCoord(rightOfTraceData, GDE.getDisplayMinValue(), GDE.getDisplayMaxValue());
// Draw data points and trace lines from left to right including one off screen to the right
for (int i = 0; i < dataPointsToDisplay.length; i++) {
// Setup current, previous and next graph trace data points
if (i > 0){
leftOfTraceData = dataPointsToDisplay[i - 1];
} else {
leftOfTraceData = -Double.MAX_VALUE;
}
traceData = dataPointsToDisplay[i];
if (i + 1 < dataPointsToDisplay.length){
rightOfTraceData = dataPointsToDisplay[i + 1];
} else {
rightOfTraceData = -Double.MAX_VALUE;
}
// Setup data point screen location stuff
screenPositionYCoord = nextScreenPositionYCoord;
nextScreenPositionYCoord = getScreenPositionYCoord(rightOfTraceData, GDE.getDisplayMinValue(), GDE.getDisplayMaxValue());
// Setup graph states and draw graph beginning and end markers
if(leftOfTraceData == -Double.MAX_VALUE && traceData != -Double.MAX_VALUE){
// At graph beginning
- g2d.drawLine(screenPositionXCoord - 2, screenPositionYCoord - 2, screenPositionXCoord - 2, screenPositionYCoord + 2);
+ g2d.drawLine(screenPositionXCoord - 4, screenPositionYCoord - 2, screenPositionXCoord - 2, screenPositionYCoord);
+ g2d.drawLine(screenPositionXCoord - 2, screenPositionYCoord, screenPositionXCoord - 4, screenPositionYCoord + 2);
atGraphBeginning = true;
insideGraph = true;
}
if(traceData != -Double.MAX_VALUE && rightOfTraceData == -Double.MAX_VALUE){
// At graph end
- g2d.drawLine(screenPositionXCoord + 2, screenPositionYCoord - 2, screenPositionXCoord + 2, screenPositionYCoord + 2);
+ g2d.drawLine(screenPositionXCoord + 4, screenPositionYCoord - 2, screenPositionXCoord + 2, screenPositionYCoord);
+ g2d.drawLine(screenPositionXCoord + 2, screenPositionYCoord, screenPositionXCoord + 4, screenPositionYCoord + 2);
atGraphEnd = true;
}
// Draw data point
if(!zoomedOut && zoom > 5){
// Draw fat data point
if (atGraphBeginning){
if (traceData != rightOfTraceData) {
// fillRect() is 95% faster than fillOval() for a 3x3 square on Ben's dev machine
g2d.fillRect(screenPositionXCoord - 1, screenPositionYCoord - 1, 3, 3);
}
} else if (atGraphEnd){
if (traceData != leftOfTraceData) {
// fillRect() is 95% faster than fillOval() for a 3x3 square on Ben's dev machine
g2d.fillRect(screenPositionXCoord - 1, screenPositionYCoord - 1, 3, 3);
}
} else if (insideGraph) {
if (traceData != leftOfTraceData || traceData != rightOfTraceData){
// fillRect() is 95% faster than fillOval() for a 3x3 square on Ben's dev machine
g2d.fillRect(screenPositionXCoord - 1, screenPositionYCoord - 1, 3, 3);
}
}
} else if (insideGraph) {
// Draw small data point
// drawLine() is 33% faster than fillRect() for a single pixel on Ben's dev machine
g2d.drawLine(screenPositionXCoord, screenPositionYCoord, screenPositionXCoord, screenPositionYCoord);
}
// Draw graph trace line
if (insideGraph && !atGraphEnd){
g2d.drawLine(screenPositionXCoord, screenPositionYCoord, screenPositionXCoord + zoom, nextScreenPositionYCoord);
}
// Reset graph states
if(atGraphEnd){
insideGraph = false;
}
atGraphBeginning = false;
// Move to the right in preparation of drawing more
screenPositionXCoord += zoom;
}
}
private int getScreenPositionYCoord(final Double traceData, final double minValue, final double maxValue) {
int point = 0;
final int height = (int) (this.getHeight() * GRAPH_TRACE_SIZE_AS_PERCENTAGE_OF_TOTAL_GRAPH_SIZE);
if (maxValue != minValue) {
point = (int) (height - (height * ((traceData - minValue) / (maxValue - minValue))));
}
return point;
}
private boolean hasDataPointToDisplay() {
boolean result = false;
if ((dataPointsToDisplay != null) && (dataPointsToDisplay.length > 0)) {
result = true;
}
return result;
}
/**
* this is where the GDE is referenced and the graph gets initialized for the first time
* @param GDE
*/
public final void setData(final GenericDataElement GDE) {
this.GDE = GDE;
this.availableDataRecords = GDE.size() + 1; // Size is currently position, this will need cleaning up later, leave it to me.
// The main thing is to take away 10 calls to the GDE per view on something that is fairly static and cache it internally
sizeGraph();
}
public final GenericDataElement getData() {
return GDE;
}
/**
* Used for InfoLayer to get the data from the single graphs for data under the mouse
*
* @param pointerDistanceFromCenter
* @return Double representation of info at the mouse cursor line which snaps to data points or null if no data under cursor
*/
public final String getMouseInfo(final int cursorPosition) {
boolean zoomedOut = OpenLogViewer.getInstance().getEntireGraphingPanel().isZoomedOutBeyondOneToOne();
String info = "-.-";
if(zoomedOut){
info = getMouseInfoZoomedOut(cursorPosition);
} else {
info = getMouseInfoZoomed(cursorPosition);
}
return info;
}
/**
* Used for InfoLayer to get the data from the single graphs for data under the mouse when not zoomed out
*
* @param pointerDistanceFromCenter
* @return Double representation of info at the mouse cursor line which snaps to data points or null if no data under cursor
*/
private final String getMouseInfoZoomed(final int cursorPosition){
String result = "-.-";
final double graphPosition = OpenLogViewer.getInstance().getEntireGraphingPanel().getGraphPosition();
final int zoom = OpenLogViewer.getInstance().getEntireGraphingPanel().getZoom();
final double offset = (graphPosition % 1) * zoom;
final int cursorPositionPlusOffset = cursorPosition + (int) offset;
double numSnapsFromCenter = ((double) cursorPositionPlusOffset / (double) zoom);
numSnapsFromCenter = Math.round(numSnapsFromCenter);
final int dataLocation = (int) graphPosition + (int) numSnapsFromCenter;
if ((dataLocation >= 0) && (dataLocation < availableDataRecords)) {
double data = GDE.get(dataLocation);
data = MathUtils.INSTANCE.roundToSignificantFigures(data, 6);
result = Double.toString(data);
if(result.length() > 8){
result = result.substring(0, 8);
}
}
return result;
}
/**
* Used for InfoLayer to get the data from the single graphs for data under the mouse when zoomed out
*
* @param pointerDistanceFromCenter
* @return Double representation of info at the mouse cursor line which snaps to data points or null if no data under cursor
*/
private final String getMouseInfoZoomedOut(int cursorPosition){
String result = "-.- | -.- | -.-";
if ((cursorPosition >= 0) && (cursorPosition < dataPointRangeInfo.length)) {
double minData = dataPointRangeInfo[cursorPosition][0];
double meanData = dataPointRangeInfo[cursorPosition][1];
double maxData = dataPointRangeInfo[cursorPosition][2];
if(minData != -Double.MAX_VALUE){
minData = MathUtils.INSTANCE.roundToSignificantFigures(minData, 6);
maxData = MathUtils.INSTANCE.roundToSignificantFigures(maxData, 6);
String resultMin = Double.toString(minData);
String resultMax = Double.toString(maxData);
if(resultMin.length() > 8){
resultMin = resultMin.substring(0, 8);
}
if(resultMax.length() > 8){
resultMax = resultMax.substring(0, 8);
}
meanData = MathUtils.INSTANCE.roundToSignificantFigures(meanData, 6);
String resultMean = Double.toString(meanData);
if(resultMin.length() > resultMax.length() && resultMin.length() < resultMean.length()){
meanData = MathUtils.INSTANCE.roundToSignificantFigures(meanData, resultMin.length() - 2);
resultMean = resultMean.substring(0, resultMin.length());
} else if (resultMax.length() < resultMean.length()){
meanData = MathUtils.INSTANCE.roundToSignificantFigures(meanData, resultMax.length() - 2);
resultMean = resultMean.substring(0, resultMax.length());
}
result = resultMin + " | " + resultMean + " | " + resultMax;
}
}
return result;
}
public final Color getColor() {
return GDE.getDisplayColor();
}
public final void setColor(final Color c) {
GDE.setDisplayColor(c);
}
/**
* initialize the graph any time you need to paint
*/
public final void initGraphZoomed() {
if (GDE != null) {
final int graphPosition = (int)OpenLogViewer.getInstance().getEntireGraphingPanel().getGraphPosition();
int graphWindowWidth = OpenLogViewer.getInstance().getEntireGraphingPanel().getWidth();
final int zoom = OpenLogViewer.getInstance().getEntireGraphingPanel().getZoom();
int numberOfPointsThatFitInDisplay = graphWindowWidth / zoom;
numberOfPointsThatFitInDisplay += 3; // Add three for off-screen points to the right
dataPointsToDisplay = new double[numberOfPointsThatFitInDisplay];
int position = graphPosition;
// Setup data points.
for (int i = 0; i < numberOfPointsThatFitInDisplay; i++) {
if (position >= 0 && position < availableDataRecords) {
dataPointsToDisplay[i] = GDE.get(position);
} else {
dataPointsToDisplay[i] = -Double.MAX_VALUE;
}
position++;
}
}
}
/**
* initialize the graph any time you need to paint
*/
public final void initGraphZoomedOut() {
if (GDE != null) {
final int graphPosition = (int)OpenLogViewer.getInstance().getEntireGraphingPanel().getGraphPosition();
int graphWindowWidth = OpenLogViewer.getInstance().getEntireGraphingPanel().getWidth();
final int zoom = OpenLogViewer.getInstance().getEntireGraphingPanel().getZoom();
dataPointsToDisplay = new double[graphWindowWidth + 1]; // Add one data point for off-screen to the right
dataPointRangeInfo = new double[graphWindowWidth + 1][3]; // Add one data point for off-screen to the right
final int numberOfRealPointsThatFitInDisplay = (graphWindowWidth * zoom) + zoom; // Add one data point for off-screen to the right
final int rightGraphPosition = graphPosition + numberOfRealPointsThatFitInDisplay;
/*
* Setup data points.
*
* The data point to display is calculated by taking the average of
* the data point spread and comparing it to the previous calculated
* data point. If the average is higher, then the highest value of
* the data spread is used. If the average is lower, then the lowest
* value of the data point spread is used.
*
* In other words, if the graph is trending upward, the peak is used.
* If the graph is trending downward, the valley is used.
* This keeps the peaks and valleys intact and the middle stuff is
* lost. This maintains the general shape of the graph, and assumes
* that local peaks and valleys are the most interesting parts of the
* graph to display.
*/
int nextAarrayIndex = 0;
double leftOfNewData = GDE.get(0);
for (int i = graphPosition; i < rightGraphPosition; i+=zoom) {
if (i >= 0 && i < availableDataRecords) {
double minData = Double.MAX_VALUE;
double maxData = -Double.MAX_VALUE;
double newData = 0.0;
double acummulateData = 0.0;
int divisor = 0;
for (int j = 0; j < zoom; j++){
if (i + j >= 0 && i + j < availableDataRecords) {
newData = GDE.get(i + j);
acummulateData += newData;
divisor++;
if (newData < minData){
minData = newData;
}
if (newData > maxData){
maxData = newData;
}
}
}
double averageData = acummulateData / divisor;
if (averageData > leftOfNewData){
dataPointsToDisplay[nextAarrayIndex] = maxData;
leftOfNewData = maxData;
} else if (averageData < leftOfNewData){
dataPointsToDisplay[nextAarrayIndex] = minData;
leftOfNewData = minData;
} else {
dataPointsToDisplay[nextAarrayIndex] = averageData;
leftOfNewData = averageData;
}
dataPointRangeInfo[nextAarrayIndex][0] = minData;
dataPointRangeInfo[nextAarrayIndex][1] = averageData;
dataPointRangeInfo[nextAarrayIndex][2] = maxData;
nextAarrayIndex++;
} else {
dataPointsToDisplay[nextAarrayIndex] = -Double.MAX_VALUE;
dataPointRangeInfo[nextAarrayIndex][0] = -Double.MAX_VALUE;
dataPointRangeInfo[nextAarrayIndex][1] = -Double.MAX_VALUE;
dataPointRangeInfo[nextAarrayIndex][2] = -Double.MAX_VALUE;
nextAarrayIndex++;
}
}
}
}
/**
* maintains the size of the graph when applying divisions
*/
public final void sizeGraph() {
final MultiGraphLayeredPane lg = OpenLogViewer.getInstance().getMultiGraphLayeredPane();
int wherePixel = 0;
if (lg.getTotalSplits() > 1) {
if (GDE.getSplitNumber() <= lg.getTotalSplits()) {
wherePixel += lg.getHeight() / lg.getTotalSplits() * GDE.getSplitNumber() - (lg.getHeight() / lg.getTotalSplits());
} else {
wherePixel += lg.getHeight() / lg.getTotalSplits() * lg.getTotalSplits() - (lg.getHeight() / lg.getTotalSplits());
}
}
this.setBounds(0, wherePixel, lg.getWidth(), lg.getHeight() / (lg.getTotalSplits()));
final boolean zoomedOut = OpenLogViewer.getInstance().getEntireGraphingPanel().isZoomedOutBeyondOneToOne();
if(zoomedOut){
initGraphZoomedOut();
} else {
initGraphZoomed();
}
}
/**
* Graph total size
* @return GDE.size()
*/
public final int graphSize() {
return availableDataRecords;
}
}
| false | true | private void paintDataPointsAndTraces(final Graphics g) {
// Setup graphics stuff
final Graphics2D g2d = (Graphics2D) g;
g2d.setColor(GDE.getDisplayColor());
// Initialize current, previous and next graph trace data points
double leftOfTraceData = -Double.MAX_VALUE;
double traceData = -Double.MAX_VALUE;
double rightOfTraceData = dataPointsToDisplay[0];
// Initialize graph status markers
boolean atGraphBeginning = false;
boolean insideGraph = false;
boolean atGraphEnd = false;
// Initialize and setup data point screen location stuff
final boolean zoomedOut = OpenLogViewer.getInstance().getEntireGraphingPanel().isZoomedOutBeyondOneToOne();
int zoom = OpenLogViewer.getInstance().getEntireGraphingPanel().getZoom();
if(zoomedOut){
zoom = 1;
}
final double graphPosition = OpenLogViewer.getInstance().getEntireGraphingPanel().getGraphPosition();
final double offset = (graphPosition % 1) * zoom;
int screenPositionXCoord = -(int)Math.round(offset); // Start with one point off-screen to the left
int screenPositionYCoord = Integer.MIN_VALUE;
int nextScreenPositionYCoord = getScreenPositionYCoord(rightOfTraceData, GDE.getDisplayMinValue(), GDE.getDisplayMaxValue());
// Draw data points and trace lines from left to right including one off screen to the right
for (int i = 0; i < dataPointsToDisplay.length; i++) {
// Setup current, previous and next graph trace data points
if (i > 0){
leftOfTraceData = dataPointsToDisplay[i - 1];
} else {
leftOfTraceData = -Double.MAX_VALUE;
}
traceData = dataPointsToDisplay[i];
if (i + 1 < dataPointsToDisplay.length){
rightOfTraceData = dataPointsToDisplay[i + 1];
} else {
rightOfTraceData = -Double.MAX_VALUE;
}
// Setup data point screen location stuff
screenPositionYCoord = nextScreenPositionYCoord;
nextScreenPositionYCoord = getScreenPositionYCoord(rightOfTraceData, GDE.getDisplayMinValue(), GDE.getDisplayMaxValue());
// Setup graph states and draw graph beginning and end markers
if(leftOfTraceData == -Double.MAX_VALUE && traceData != -Double.MAX_VALUE){
// At graph beginning
g2d.drawLine(screenPositionXCoord - 2, screenPositionYCoord - 2, screenPositionXCoord - 2, screenPositionYCoord + 2);
atGraphBeginning = true;
insideGraph = true;
}
if(traceData != -Double.MAX_VALUE && rightOfTraceData == -Double.MAX_VALUE){
// At graph end
g2d.drawLine(screenPositionXCoord + 2, screenPositionYCoord - 2, screenPositionXCoord + 2, screenPositionYCoord + 2);
atGraphEnd = true;
}
// Draw data point
if(!zoomedOut && zoom > 5){
// Draw fat data point
if (atGraphBeginning){
if (traceData != rightOfTraceData) {
// fillRect() is 95% faster than fillOval() for a 3x3 square on Ben's dev machine
g2d.fillRect(screenPositionXCoord - 1, screenPositionYCoord - 1, 3, 3);
}
} else if (atGraphEnd){
if (traceData != leftOfTraceData) {
// fillRect() is 95% faster than fillOval() for a 3x3 square on Ben's dev machine
g2d.fillRect(screenPositionXCoord - 1, screenPositionYCoord - 1, 3, 3);
}
} else if (insideGraph) {
if (traceData != leftOfTraceData || traceData != rightOfTraceData){
// fillRect() is 95% faster than fillOval() for a 3x3 square on Ben's dev machine
g2d.fillRect(screenPositionXCoord - 1, screenPositionYCoord - 1, 3, 3);
}
}
} else if (insideGraph) {
// Draw small data point
// drawLine() is 33% faster than fillRect() for a single pixel on Ben's dev machine
g2d.drawLine(screenPositionXCoord, screenPositionYCoord, screenPositionXCoord, screenPositionYCoord);
}
// Draw graph trace line
if (insideGraph && !atGraphEnd){
g2d.drawLine(screenPositionXCoord, screenPositionYCoord, screenPositionXCoord + zoom, nextScreenPositionYCoord);
}
// Reset graph states
if(atGraphEnd){
insideGraph = false;
}
atGraphBeginning = false;
// Move to the right in preparation of drawing more
screenPositionXCoord += zoom;
}
}
/**
 * Paints this graph's data points and the trace lines connecting them onto the
 * supplied graphics context, sweeping left to right and including one point off
 * screen on each side so edge traces are still drawn.
 *
 * {@code -Double.MAX_VALUE} is used throughout as the "no data" sentinel;
 * transitions into/out of that sentinel mark the beginning and end of the
 * recorded trace, which are drawn as small chevron markers.
 *
 * NOTE(review): assumes {@code dataPointsToDisplay} is non-empty — the first
 * element is read unconditionally; confirm callers guarantee this.
 *
 * @param g the graphics context of this graph's panel
 */
private void paintDataPointsAndTraces(final Graphics g) {
// Setup graphics stuff
final Graphics2D g2d = (Graphics2D) g;
g2d.setColor(GDE.getDisplayColor());
// Initialize current, previous and next graph trace data points
double leftOfTraceData = -Double.MAX_VALUE;
double traceData = -Double.MAX_VALUE;
double rightOfTraceData = dataPointsToDisplay[0];
// Initialize graph status markers
boolean atGraphBeginning = false;
boolean insideGraph = false;
boolean atGraphEnd = false;
// Initialize and setup data point screen location stuff
final boolean zoomedOut = OpenLogViewer.getInstance().getEntireGraphingPanel().isZoomedOutBeyondOneToOne();
int zoom = OpenLogViewer.getInstance().getEntireGraphingPanel().getZoom();
// When zoomed out beyond 1:1 each array entry is already aggregated, so draw one pixel per entry
if(zoomedOut){
zoom = 1;
}
final double graphPosition = OpenLogViewer.getInstance().getEntireGraphingPanel().getGraphPosition();
final double offset = (graphPosition % 1) * zoom;
int screenPositionXCoord = -(int)Math.round(offset); // Start with one point off-screen to the left
int screenPositionYCoord = Integer.MIN_VALUE;
int nextScreenPositionYCoord = getScreenPositionYCoord(rightOfTraceData, GDE.getDisplayMinValue(), GDE.getDisplayMaxValue());
// Draw data points and trace lines from left to right including one off screen to the right
for (int i = 0; i < dataPointsToDisplay.length; i++) {
// Setup current, previous and next graph trace data points
if (i > 0){
leftOfTraceData = dataPointsToDisplay[i - 1];
} else {
leftOfTraceData = -Double.MAX_VALUE;
}
traceData = dataPointsToDisplay[i];
if (i + 1 < dataPointsToDisplay.length){
rightOfTraceData = dataPointsToDisplay[i + 1];
} else {
rightOfTraceData = -Double.MAX_VALUE;
}
// Setup data point screen location stuff; reuse the previously computed next-Y
screenPositionYCoord = nextScreenPositionYCoord;
nextScreenPositionYCoord = getScreenPositionYCoord(rightOfTraceData, GDE.getDisplayMinValue(), GDE.getDisplayMaxValue());
// Setup graph states and draw graph beginning and end markers
if(leftOfTraceData == -Double.MAX_VALUE && traceData != -Double.MAX_VALUE){
// At graph beginning: draw a left-pointing chevron marker
g2d.drawLine(screenPositionXCoord - 4, screenPositionYCoord - 2, screenPositionXCoord - 2, screenPositionYCoord);
g2d.drawLine(screenPositionXCoord - 2, screenPositionYCoord, screenPositionXCoord - 4, screenPositionYCoord + 2);
atGraphBeginning = true;
insideGraph = true;
}
if(traceData != -Double.MAX_VALUE && rightOfTraceData == -Double.MAX_VALUE){
// At graph end: draw a right-pointing chevron marker
g2d.drawLine(screenPositionXCoord + 4, screenPositionYCoord - 2, screenPositionXCoord + 2, screenPositionYCoord);
g2d.drawLine(screenPositionXCoord + 2, screenPositionYCoord, screenPositionXCoord + 4, screenPositionYCoord + 2);
atGraphEnd = true;
}
// Draw data point
if(!zoomedOut && zoom > 5){
// Draw fat data point, but only where the trace changes value so flat runs stay uncluttered
if (atGraphBeginning){
if (traceData != rightOfTraceData) {
// fillRect() is 95% faster than fillOval() for a 3x3 square on Ben's dev machine
g2d.fillRect(screenPositionXCoord - 1, screenPositionYCoord - 1, 3, 3);
}
} else if (atGraphEnd){
if (traceData != leftOfTraceData) {
// fillRect() is 95% faster than fillOval() for a 3x3 square on Ben's dev machine
g2d.fillRect(screenPositionXCoord - 1, screenPositionYCoord - 1, 3, 3);
}
} else if (insideGraph) {
if (traceData != leftOfTraceData || traceData != rightOfTraceData){
// fillRect() is 95% faster than fillOval() for a 3x3 square on Ben's dev machine
g2d.fillRect(screenPositionXCoord - 1, screenPositionYCoord - 1, 3, 3);
}
}
} else if (insideGraph) {
// Draw small data point
// drawLine() is 33% faster than fillRect() for a single pixel on Ben's dev machine
g2d.drawLine(screenPositionXCoord, screenPositionYCoord, screenPositionXCoord, screenPositionYCoord);
}
// Draw graph trace line to the next point
if (insideGraph && !atGraphEnd){
g2d.drawLine(screenPositionXCoord, screenPositionYCoord, screenPositionXCoord + zoom, nextScreenPositionYCoord);
}
// Reset graph states
if(atGraphEnd){
insideGraph = false;
}
atGraphBeginning = false;
// Move to the right in preparation of drawing more
screenPositionXCoord += zoom;
}
}
|
diff --git a/src/com/csipsimple/wizards/impl/FastVoip.java b/src/com/csipsimple/wizards/impl/FastVoip.java
index 0df26e89..98aeb628 100644
--- a/src/com/csipsimple/wizards/impl/FastVoip.java
+++ b/src/com/csipsimple/wizards/impl/FastVoip.java
@@ -1,44 +1,44 @@
/**
* Copyright (C) 2010 Regis Montoya (aka r3gis - www.r3gis.fr)
* This file is part of CSipSimple.
*
* CSipSimple is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* CSipSimple is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with CSipSimple. If not, see <http://www.gnu.org/licenses/>.
*/
package com.csipsimple.wizards.impl;
import com.csipsimple.api.SipProfile;
public class FastVoip extends SimpleImplementation {
@Override
protected String getDomain() {
return "fastvoip.com";
}
@Override
protected String getDefaultName() {
return "FastVoip";
}
@Override
public SipProfile buildAccount(SipProfile account) {
SipProfile acc = super.buildAccount(account);
- acc.proxies = new String[] {"sip:proxy.fastvoip.com"};
+ acc.proxies = new String[] {"sip:sip.fastvoip.com"};
acc.transport = SipProfile.TRANSPORT_UDP;
return acc;
}
}
| true | true | public SipProfile buildAccount(SipProfile account) {
SipProfile acc = super.buildAccount(account);
acc.proxies = new String[] {"sip:proxy.fastvoip.com"};
acc.transport = SipProfile.TRANSPORT_UDP;
return acc;
}
| public SipProfile buildAccount(SipProfile account) {
SipProfile acc = super.buildAccount(account);
acc.proxies = new String[] {"sip:sip.fastvoip.com"};
acc.transport = SipProfile.TRANSPORT_UDP;
return acc;
}
|
diff --git a/src/net/sf/hajdbc/sync/DifferentialSynchronizationStrategy.java b/src/net/sf/hajdbc/sync/DifferentialSynchronizationStrategy.java
index 89b2e1f2..0e3b1f62 100644
--- a/src/net/sf/hajdbc/sync/DifferentialSynchronizationStrategy.java
+++ b/src/net/sf/hajdbc/sync/DifferentialSynchronizationStrategy.java
@@ -1,520 +1,520 @@
/*
* HA-JDBC: High-Availability JDBC
* Copyright (c) 2004-2006 Paul Ferraro
*
* This library is free software; you can redistribute it and/or modify it
* under the terms of the GNU Lesser General Public License as published by the
* Free Software Foundation; either version 2.1 of the License, or (at your
* option) any later version.
*
* This library is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
* for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this library; if not, write to the Free Software Foundation,
* Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*
* Contact: [email protected]
*/
package net.sf.hajdbc.sync;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TreeMap;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import net.sf.hajdbc.DatabaseMetaDataCache;
import net.sf.hajdbc.Dialect;
import net.sf.hajdbc.ForeignKeyConstraint;
import net.sf.hajdbc.Messages;
import net.sf.hajdbc.SynchronizationStrategy;
import net.sf.hajdbc.TableProperties;
import net.sf.hajdbc.UniqueConstraint;
import net.sf.hajdbc.util.Strings;
import net.sf.hajdbc.util.concurrent.DaemonThreadFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Database-independent synchronization strategy that only updates differences between two databases.
* This strategy is best used when there are <em>few</em> differences between the active database and the inactive database (i.e. barely out of sync).
* The following algorithm is used:
* <ol>
* <li>Drop the foreign keys on the inactive database (to avoid integrity constraint violations)</li>
* <li>For each database table:
* <ol>
* <li>Drop the unique constraints on the table (to avoid integrity constraint violations)</li>
* <li>Find the primary key(s) of the table</li>
* <li>Query all rows in the inactive database table, sorting by the primary key(s)</li>
* <li>Query all rows on the active database table</li>
* <li>For each row in table:
* <ol>
* <li>If primary key of the rows are the same, determine whether or not row needs to be updated</li>
* <li>Otherwise, determine whether row should be deleted, or a new row is to be inserted</li>
* </ol>
* </li>
* <li>Re-create the unique constraints on the table (to avoid integrity constraint violations)</li>
* </ol>
* </li>
* <li>Re-create the foreign keys on the inactive database</li>
* <li>Synchronize sequences</li>
* </ol>
* @author Paul Ferraro
* @version $Revision$
* @since 1.0
*/
public class DifferentialSynchronizationStrategy implements SynchronizationStrategy
{
private static Logger logger = LoggerFactory.getLogger(DifferentialSynchronizationStrategy.class);
private ExecutorService executor = Executors.newSingleThreadExecutor(DaemonThreadFactory.getInstance());
private int fetchSize = 0;
/**
* @see net.sf.hajdbc.SynchronizationStrategy#synchronize(java.sql.Connection, java.sql.Connection, net.sf.hajdbc.DatabaseMetaDataCache, net.sf.hajdbc.Dialect)
*/
public void synchronize(Connection inactiveConnection, Connection activeConnection, DatabaseMetaDataCache metaData, Dialect dialect) throws SQLException
{
inactiveConnection.setAutoCommit(true);
Statement statement = inactiveConnection.createStatement();
Collection<TableProperties> tables = metaData.getDatabaseProperties(inactiveConnection).getTables();
// Drop foreign key constraints on the inactive database
for (TableProperties table: tables)
{
for (ForeignKeyConstraint constraint: table.getForeignKeyConstraints())
{
String sql = dialect.getDropForeignKeyConstraintSQL(constraint);
logger.debug(sql);
statement.addBatch(sql);
}
}
statement.executeBatch();
statement.clearBatch();
Map<Short, String> primaryKeyColumnMap = new TreeMap<Short, String>();
Set<Integer> primaryKeyColumnIndexSet = new LinkedHashSet<Integer>();
inactiveConnection.setAutoCommit(false);
try
{
for (TableProperties table: tables)
{
primaryKeyColumnMap.clear();
primaryKeyColumnIndexSet.clear();
UniqueConstraint primaryKey = table.getPrimaryKey();
if (primaryKey == null)
{
- throw new SQLException(Messages.getMessage(Messages.PRIMARY_KEY_REQUIRED, this.getClass().getName(), table));
+ throw new SQLException(Messages.getMessage(Messages.PRIMARY_KEY_REQUIRED, this.getClass().getName(), table.getName()));
}
List<String> primaryKeyColumnList = primaryKey.getColumnList();
Collection<UniqueConstraint> constraints = table.getUniqueConstraints();
constraints.remove(primaryKey);
// Drop unique constraints on the current table
for (UniqueConstraint constraint: constraints)
{
String sql = dialect.getDropUniqueConstraintSQL(constraint);
logger.debug(sql);
statement.addBatch(sql);
}
statement.executeBatch();
statement.clearBatch();
Collection<String> columns = table.getColumns();
// List of colums for select statement - starting with primary key
List<String> columnList = new ArrayList<String>(columns);
columnList.addAll(primaryKeyColumnList);
for (String column: columns)
{
if (!primaryKeyColumnList.contains(column))
{
columnList.add(column);
}
}
List<String> nonPrimaryKeyColumnList = columnList.subList(primaryKeyColumnList.size(), columnList.size());
String commaDelimitedColumns = Strings.join(columnList, ", ");
// Retrieve table rows in primary key order
final String selectSQL = "SELECT " + commaDelimitedColumns + " FROM " + table.getName() + " ORDER BY " + Strings.join(primaryKeyColumnList, ", ");
final Statement inactiveStatement = inactiveConnection.createStatement();
inactiveStatement.setFetchSize(this.fetchSize);
logger.debug(selectSQL);
Callable<ResultSet> callable = new Callable<ResultSet>()
{
public ResultSet call() throws java.sql.SQLException
{
return inactiveStatement.executeQuery(selectSQL);
}
};
Future<ResultSet> future = this.executor.submit(callable);
Statement activeStatement = activeConnection.createStatement(ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);
activeStatement.setFetchSize(this.fetchSize);
ResultSet activeResultSet = activeStatement.executeQuery(selectSQL);
ResultSet inactiveResultSet = future.get();
String primaryKeyWhereClause = " WHERE " + Strings.join(primaryKeyColumnList, " = ? AND ") + " = ?";
// Construct DELETE SQL
String deleteSQL = "DELETE FROM " + table.getName() + primaryKeyWhereClause;
logger.debug(deleteSQL.toString());
PreparedStatement deleteStatement = inactiveConnection.prepareStatement(deleteSQL);
String[] parameters = new String[columnList.size()];
Arrays.fill(parameters, "?");
// Construct INSERT SQL
String insertSQL = "INSERT INTO " + table.getName() + " (" + commaDelimitedColumns + ") VALUES (" + Strings.join(Arrays.asList(parameters), ", ") + ")";
logger.debug(insertSQL);
PreparedStatement insertStatement = inactiveConnection.prepareStatement(insertSQL);
// Construct UPDATE SQL
String updateSQL = "UPDATE " + table.getName() + " SET " + Strings.join(nonPrimaryKeyColumnList, " = ?, ") + " = ?" + primaryKeyWhereClause;
logger.debug(updateSQL);
PreparedStatement updateStatement = inactiveConnection.prepareStatement(updateSQL);
boolean hasMoreActiveResults = activeResultSet.next();
boolean hasMoreInactiveResults = inactiveResultSet.next();
int insertCount = 0;
int updateCount = 0;
int deleteCount = 0;
while (hasMoreActiveResults || hasMoreInactiveResults)
{
int compare = 0;
if (!hasMoreActiveResults)
{
compare = 1;
}
else if (!hasMoreInactiveResults)
{
compare = -1;
}
else
{
for (int i = 1; i <= primaryKeyColumnList.size(); ++i)
{
Object activeObject = activeResultSet.getObject(i);
Object inactiveObject = inactiveResultSet.getObject(i);
// We assume that the primary keys column types are Comparable
compare = this.compare(activeObject, inactiveObject);
if (compare != 0)
{
break;
}
}
}
if (compare > 0)
{
deleteStatement.clearParameters();
for (int i = 1; i <= primaryKeyColumnList.size(); ++i)
{
int type = dialect.getColumnType(table.getColumn(columnList.get(i - 1)));
deleteStatement.setObject(i, inactiveResultSet.getObject(i), type);
}
deleteStatement.addBatch();
deleteCount += 1;
}
else if (compare < 0)
{
insertStatement.clearParameters();
for (int i = 1; i <= columnList.size(); ++i)
{
Object object = activeResultSet.getObject(i);
int type = dialect.getColumnType(table.getColumn(columnList.get(i - 1)));
if (activeResultSet.wasNull())
{
insertStatement.setNull(i, type);
}
else
{
insertStatement.setObject(i, object, type);
}
}
insertStatement.addBatch();
insertCount += 1;
}
else // if (compare == 0)
{
updateStatement.clearParameters();
boolean updated = false;
for (int i = primaryKeyColumnList.size() + 1; i <= columnList.size(); ++i)
{
Object activeObject = activeResultSet.getObject(i);
Object inactiveObject = inactiveResultSet.getObject(i);
int type = dialect.getColumnType(table.getColumn(columnList.get(i - 1)));
int index = i - primaryKeyColumnList.size();
if (activeResultSet.wasNull())
{
updateStatement.setNull(index, type);
updated |= !inactiveResultSet.wasNull();
}
else
{
updateStatement.setObject(index, activeObject, type);
updated |= inactiveResultSet.wasNull();
updated |= !equals(activeObject, inactiveObject);
}
}
if (updated)
{
for (int i = 1; i <= primaryKeyColumnList.size(); ++i)
{
int type = dialect.getColumnType(table.getColumn(columnList.get(i - 1)));
updateStatement.setObject(i + nonPrimaryKeyColumnList.size(), inactiveResultSet.getObject(i), type);
}
updateStatement.addBatch();
updateCount += 1;
}
}
if (hasMoreActiveResults && (compare <= 0))
{
hasMoreActiveResults = activeResultSet.next();
}
if (hasMoreInactiveResults && (compare >= 0))
{
hasMoreInactiveResults = inactiveResultSet.next();
}
}
if (deleteCount > 0)
{
deleteStatement.executeBatch();
}
deleteStatement.close();
if (insertCount > 0)
{
insertStatement.executeBatch();
}
insertStatement.close();
if (updateCount > 0)
{
updateStatement.executeBatch();
}
updateStatement.close();
inactiveStatement.close();
activeStatement.close();
// Collect unique constraints on this table from the active database and re-create them on the inactive database
for (UniqueConstraint constraint: constraints)
{
String sql = dialect.getCreateUniqueConstraintSQL(constraint);
logger.debug(sql);
statement.addBatch(sql);
}
statement.executeBatch();
statement.clearBatch();
inactiveConnection.commit();
logger.info(Messages.getMessage(Messages.INSERT_COUNT, insertCount, table.getName()));
logger.info(Messages.getMessage(Messages.UPDATE_COUNT, updateCount, table.getName()));
logger.info(Messages.getMessage(Messages.DELETE_COUNT, deleteCount, table.getName()));
}
}
catch (ExecutionException e)
{
this.rollback(inactiveConnection);
throw new net.sf.hajdbc.SQLException(e.getCause());
}
catch (InterruptedException e)
{
this.rollback(inactiveConnection);
throw new net.sf.hajdbc.SQLException(e);
}
catch (SQLException e)
{
this.rollback(inactiveConnection);
throw e;
}
inactiveConnection.setAutoCommit(true);
// Collect foreign key constraints from the active database and create them on the inactive database
for (TableProperties table: tables)
{
for (ForeignKeyConstraint constraint: table.getForeignKeyConstraints())
{
String sql = dialect.getCreateForeignKeyConstraintSQL(constraint);
logger.debug(sql);
statement.addBatch(sql);
}
}
statement.executeBatch();
statement.clearBatch();
Map<String, Long> activeSequenceMap = dialect.getSequences(activeConnection);
Map<String, Long> inactiveSequenceMap = dialect.getSequences(inactiveConnection);
for (String sequence: activeSequenceMap.keySet())
{
long activeValue = activeSequenceMap.get(sequence);
long inactiveValue = inactiveSequenceMap.get(sequence);
if (activeValue != inactiveValue)
{
String sql = dialect.getAlterSequenceSQL(sequence, activeValue);
logger.debug(sql);
statement.addBatch(sql);
}
}
statement.executeBatch();
statement.close();
}
private boolean equals(Object object1, Object object2)
{
if (byte[].class.isInstance(object1) && byte[].class.isInstance(object2))
{
byte[] bytes1 = (byte[]) object1;
byte[] bytes2 = (byte[]) object2;
if (bytes1.length != bytes2.length)
{
return false;
}
return Arrays.equals(bytes1, bytes2);
}
return object1.equals(object2);
}
@SuppressWarnings("unchecked")
private int compare(Object object1, Object object2)
{
return Comparable.class.cast(object1).compareTo(object2);
}
/**
* @see net.sf.hajdbc.SynchronizationStrategy#requiresTableLocking()
*/
public boolean requiresTableLocking()
{
return true;
}
private void rollback(Connection connection)
{
try
{
connection.rollback();
connection.setAutoCommit(true);
}
catch (java.sql.SQLException e)
{
logger.warn(e.toString(), e);
}
}
/**
* @return the fetchSize.
*/
public int getFetchSize()
{
return this.fetchSize;
}
/**
* @param fetchSize the fetchSize to set.
*/
public void setFetchSize(int fetchSize)
{
this.fetchSize = fetchSize;
}
}
| true | true | public void synchronize(Connection inactiveConnection, Connection activeConnection, DatabaseMetaDataCache metaData, Dialect dialect) throws SQLException
{
inactiveConnection.setAutoCommit(true);
Statement statement = inactiveConnection.createStatement();
Collection<TableProperties> tables = metaData.getDatabaseProperties(inactiveConnection).getTables();
// Drop foreign key constraints on the inactive database
for (TableProperties table: tables)
{
for (ForeignKeyConstraint constraint: table.getForeignKeyConstraints())
{
String sql = dialect.getDropForeignKeyConstraintSQL(constraint);
logger.debug(sql);
statement.addBatch(sql);
}
}
statement.executeBatch();
statement.clearBatch();
Map<Short, String> primaryKeyColumnMap = new TreeMap<Short, String>();
Set<Integer> primaryKeyColumnIndexSet = new LinkedHashSet<Integer>();
inactiveConnection.setAutoCommit(false);
try
{
for (TableProperties table: tables)
{
primaryKeyColumnMap.clear();
primaryKeyColumnIndexSet.clear();
UniqueConstraint primaryKey = table.getPrimaryKey();
if (primaryKey == null)
{
throw new SQLException(Messages.getMessage(Messages.PRIMARY_KEY_REQUIRED, this.getClass().getName(), table));
}
List<String> primaryKeyColumnList = primaryKey.getColumnList();
Collection<UniqueConstraint> constraints = table.getUniqueConstraints();
constraints.remove(primaryKey);
// Drop unique constraints on the current table
for (UniqueConstraint constraint: constraints)
{
String sql = dialect.getDropUniqueConstraintSQL(constraint);
logger.debug(sql);
statement.addBatch(sql);
}
statement.executeBatch();
statement.clearBatch();
Collection<String> columns = table.getColumns();
// List of colums for select statement - starting with primary key
List<String> columnList = new ArrayList<String>(columns);
columnList.addAll(primaryKeyColumnList);
for (String column: columns)
{
if (!primaryKeyColumnList.contains(column))
{
columnList.add(column);
}
}
List<String> nonPrimaryKeyColumnList = columnList.subList(primaryKeyColumnList.size(), columnList.size());
String commaDelimitedColumns = Strings.join(columnList, ", ");
// Retrieve table rows in primary key order
final String selectSQL = "SELECT " + commaDelimitedColumns + " FROM " + table.getName() + " ORDER BY " + Strings.join(primaryKeyColumnList, ", ");
final Statement inactiveStatement = inactiveConnection.createStatement();
inactiveStatement.setFetchSize(this.fetchSize);
logger.debug(selectSQL);
Callable<ResultSet> callable = new Callable<ResultSet>()
{
public ResultSet call() throws java.sql.SQLException
{
return inactiveStatement.executeQuery(selectSQL);
}
};
Future<ResultSet> future = this.executor.submit(callable);
Statement activeStatement = activeConnection.createStatement(ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);
activeStatement.setFetchSize(this.fetchSize);
ResultSet activeResultSet = activeStatement.executeQuery(selectSQL);
ResultSet inactiveResultSet = future.get();
String primaryKeyWhereClause = " WHERE " + Strings.join(primaryKeyColumnList, " = ? AND ") + " = ?";
// Construct DELETE SQL
String deleteSQL = "DELETE FROM " + table.getName() + primaryKeyWhereClause;
logger.debug(deleteSQL.toString());
PreparedStatement deleteStatement = inactiveConnection.prepareStatement(deleteSQL);
String[] parameters = new String[columnList.size()];
Arrays.fill(parameters, "?");
// Construct INSERT SQL
String insertSQL = "INSERT INTO " + table.getName() + " (" + commaDelimitedColumns + ") VALUES (" + Strings.join(Arrays.asList(parameters), ", ") + ")";
logger.debug(insertSQL);
PreparedStatement insertStatement = inactiveConnection.prepareStatement(insertSQL);
// Construct UPDATE SQL
String updateSQL = "UPDATE " + table.getName() + " SET " + Strings.join(nonPrimaryKeyColumnList, " = ?, ") + " = ?" + primaryKeyWhereClause;
logger.debug(updateSQL);
PreparedStatement updateStatement = inactiveConnection.prepareStatement(updateSQL);
boolean hasMoreActiveResults = activeResultSet.next();
boolean hasMoreInactiveResults = inactiveResultSet.next();
int insertCount = 0;
int updateCount = 0;
int deleteCount = 0;
while (hasMoreActiveResults || hasMoreInactiveResults)
{
int compare = 0;
if (!hasMoreActiveResults)
{
compare = 1;
}
else if (!hasMoreInactiveResults)
{
compare = -1;
}
else
{
for (int i = 1; i <= primaryKeyColumnList.size(); ++i)
{
Object activeObject = activeResultSet.getObject(i);
Object inactiveObject = inactiveResultSet.getObject(i);
// We assume that the primary keys column types are Comparable
compare = this.compare(activeObject, inactiveObject);
if (compare != 0)
{
break;
}
}
}
if (compare > 0)
{
deleteStatement.clearParameters();
for (int i = 1; i <= primaryKeyColumnList.size(); ++i)
{
int type = dialect.getColumnType(table.getColumn(columnList.get(i - 1)));
deleteStatement.setObject(i, inactiveResultSet.getObject(i), type);
}
deleteStatement.addBatch();
deleteCount += 1;
}
else if (compare < 0)
{
insertStatement.clearParameters();
for (int i = 1; i <= columnList.size(); ++i)
{
Object object = activeResultSet.getObject(i);
int type = dialect.getColumnType(table.getColumn(columnList.get(i - 1)));
if (activeResultSet.wasNull())
{
insertStatement.setNull(i, type);
}
else
{
insertStatement.setObject(i, object, type);
}
}
insertStatement.addBatch();
insertCount += 1;
}
else // if (compare == 0)
{
updateStatement.clearParameters();
boolean updated = false;
for (int i = primaryKeyColumnList.size() + 1; i <= columnList.size(); ++i)
{
Object activeObject = activeResultSet.getObject(i);
Object inactiveObject = inactiveResultSet.getObject(i);
int type = dialect.getColumnType(table.getColumn(columnList.get(i - 1)));
int index = i - primaryKeyColumnList.size();
if (activeResultSet.wasNull())
{
updateStatement.setNull(index, type);
updated |= !inactiveResultSet.wasNull();
}
else
{
updateStatement.setObject(index, activeObject, type);
updated |= inactiveResultSet.wasNull();
updated |= !equals(activeObject, inactiveObject);
}
}
if (updated)
{
for (int i = 1; i <= primaryKeyColumnList.size(); ++i)
{
int type = dialect.getColumnType(table.getColumn(columnList.get(i - 1)));
updateStatement.setObject(i + nonPrimaryKeyColumnList.size(), inactiveResultSet.getObject(i), type);
}
updateStatement.addBatch();
updateCount += 1;
}
}
if (hasMoreActiveResults && (compare <= 0))
{
hasMoreActiveResults = activeResultSet.next();
}
if (hasMoreInactiveResults && (compare >= 0))
{
hasMoreInactiveResults = inactiveResultSet.next();
}
}
if (deleteCount > 0)
{
deleteStatement.executeBatch();
}
deleteStatement.close();
if (insertCount > 0)
{
insertStatement.executeBatch();
}
insertStatement.close();
if (updateCount > 0)
{
updateStatement.executeBatch();
}
updateStatement.close();
inactiveStatement.close();
activeStatement.close();
// Collect unique constraints on this table from the active database and re-create them on the inactive database
for (UniqueConstraint constraint: constraints)
{
String sql = dialect.getCreateUniqueConstraintSQL(constraint);
logger.debug(sql);
statement.addBatch(sql);
}
statement.executeBatch();
statement.clearBatch();
inactiveConnection.commit();
logger.info(Messages.getMessage(Messages.INSERT_COUNT, insertCount, table.getName()));
logger.info(Messages.getMessage(Messages.UPDATE_COUNT, updateCount, table.getName()));
logger.info(Messages.getMessage(Messages.DELETE_COUNT, deleteCount, table.getName()));
}
}
catch (ExecutionException e)
{
this.rollback(inactiveConnection);
throw new net.sf.hajdbc.SQLException(e.getCause());
}
catch (InterruptedException e)
{
this.rollback(inactiveConnection);
throw new net.sf.hajdbc.SQLException(e);
}
catch (SQLException e)
{
this.rollback(inactiveConnection);
throw e;
}
inactiveConnection.setAutoCommit(true);
// Collect foreign key constraints from the active database and create them on the inactive database
for (TableProperties table: tables)
{
for (ForeignKeyConstraint constraint: table.getForeignKeyConstraints())
{
String sql = dialect.getCreateForeignKeyConstraintSQL(constraint);
logger.debug(sql);
statement.addBatch(sql);
}
}
statement.executeBatch();
statement.clearBatch();
Map<String, Long> activeSequenceMap = dialect.getSequences(activeConnection);
Map<String, Long> inactiveSequenceMap = dialect.getSequences(inactiveConnection);
for (String sequence: activeSequenceMap.keySet())
{
long activeValue = activeSequenceMap.get(sequence);
long inactiveValue = inactiveSequenceMap.get(sequence);
if (activeValue != inactiveValue)
{
String sql = dialect.getAlterSequenceSQL(sequence, activeValue);
logger.debug(sql);
statement.addBatch(sql);
}
}
statement.executeBatch();
statement.close();
}
| public void synchronize(Connection inactiveConnection, Connection activeConnection, DatabaseMetaDataCache metaData, Dialect dialect) throws SQLException
{
inactiveConnection.setAutoCommit(true);
Statement statement = inactiveConnection.createStatement();
Collection<TableProperties> tables = metaData.getDatabaseProperties(inactiveConnection).getTables();
// Drop foreign key constraints on the inactive database
for (TableProperties table: tables)
{
for (ForeignKeyConstraint constraint: table.getForeignKeyConstraints())
{
String sql = dialect.getDropForeignKeyConstraintSQL(constraint);
logger.debug(sql);
statement.addBatch(sql);
}
}
statement.executeBatch();
statement.clearBatch();
Map<Short, String> primaryKeyColumnMap = new TreeMap<Short, String>();
Set<Integer> primaryKeyColumnIndexSet = new LinkedHashSet<Integer>();
inactiveConnection.setAutoCommit(false);
try
{
for (TableProperties table: tables)
{
primaryKeyColumnMap.clear();
primaryKeyColumnIndexSet.clear();
UniqueConstraint primaryKey = table.getPrimaryKey();
if (primaryKey == null)
{
throw new SQLException(Messages.getMessage(Messages.PRIMARY_KEY_REQUIRED, this.getClass().getName(), table.getName()));
}
List<String> primaryKeyColumnList = primaryKey.getColumnList();
Collection<UniqueConstraint> constraints = table.getUniqueConstraints();
constraints.remove(primaryKey);
// Drop unique constraints on the current table
for (UniqueConstraint constraint: constraints)
{
String sql = dialect.getDropUniqueConstraintSQL(constraint);
logger.debug(sql);
statement.addBatch(sql);
}
statement.executeBatch();
statement.clearBatch();
Collection<String> columns = table.getColumns();
// List of colums for select statement - starting with primary key
List<String> columnList = new ArrayList<String>(columns);
columnList.addAll(primaryKeyColumnList);
for (String column: columns)
{
if (!primaryKeyColumnList.contains(column))
{
columnList.add(column);
}
}
List<String> nonPrimaryKeyColumnList = columnList.subList(primaryKeyColumnList.size(), columnList.size());
String commaDelimitedColumns = Strings.join(columnList, ", ");
// Retrieve table rows in primary key order
final String selectSQL = "SELECT " + commaDelimitedColumns + " FROM " + table.getName() + " ORDER BY " + Strings.join(primaryKeyColumnList, ", ");
final Statement inactiveStatement = inactiveConnection.createStatement();
inactiveStatement.setFetchSize(this.fetchSize);
logger.debug(selectSQL);
Callable<ResultSet> callable = new Callable<ResultSet>()
{
public ResultSet call() throws java.sql.SQLException
{
return inactiveStatement.executeQuery(selectSQL);
}
};
Future<ResultSet> future = this.executor.submit(callable);
Statement activeStatement = activeConnection.createStatement(ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);
activeStatement.setFetchSize(this.fetchSize);
ResultSet activeResultSet = activeStatement.executeQuery(selectSQL);
ResultSet inactiveResultSet = future.get();
String primaryKeyWhereClause = " WHERE " + Strings.join(primaryKeyColumnList, " = ? AND ") + " = ?";
// Construct DELETE SQL
String deleteSQL = "DELETE FROM " + table.getName() + primaryKeyWhereClause;
logger.debug(deleteSQL.toString());
PreparedStatement deleteStatement = inactiveConnection.prepareStatement(deleteSQL);
String[] parameters = new String[columnList.size()];
Arrays.fill(parameters, "?");
// Construct INSERT SQL
String insertSQL = "INSERT INTO " + table.getName() + " (" + commaDelimitedColumns + ") VALUES (" + Strings.join(Arrays.asList(parameters), ", ") + ")";
logger.debug(insertSQL);
PreparedStatement insertStatement = inactiveConnection.prepareStatement(insertSQL);
// Construct UPDATE SQL
String updateSQL = "UPDATE " + table.getName() + " SET " + Strings.join(nonPrimaryKeyColumnList, " = ?, ") + " = ?" + primaryKeyWhereClause;
logger.debug(updateSQL);
PreparedStatement updateStatement = inactiveConnection.prepareStatement(updateSQL);
boolean hasMoreActiveResults = activeResultSet.next();
boolean hasMoreInactiveResults = inactiveResultSet.next();
int insertCount = 0;
int updateCount = 0;
int deleteCount = 0;
while (hasMoreActiveResults || hasMoreInactiveResults)
{
int compare = 0;
if (!hasMoreActiveResults)
{
compare = 1;
}
else if (!hasMoreInactiveResults)
{
compare = -1;
}
else
{
for (int i = 1; i <= primaryKeyColumnList.size(); ++i)
{
Object activeObject = activeResultSet.getObject(i);
Object inactiveObject = inactiveResultSet.getObject(i);
// We assume that the primary keys column types are Comparable
compare = this.compare(activeObject, inactiveObject);
if (compare != 0)
{
break;
}
}
}
if (compare > 0)
{
deleteStatement.clearParameters();
for (int i = 1; i <= primaryKeyColumnList.size(); ++i)
{
int type = dialect.getColumnType(table.getColumn(columnList.get(i - 1)));
deleteStatement.setObject(i, inactiveResultSet.getObject(i), type);
}
deleteStatement.addBatch();
deleteCount += 1;
}
else if (compare < 0)
{
insertStatement.clearParameters();
for (int i = 1; i <= columnList.size(); ++i)
{
Object object = activeResultSet.getObject(i);
int type = dialect.getColumnType(table.getColumn(columnList.get(i - 1)));
if (activeResultSet.wasNull())
{
insertStatement.setNull(i, type);
}
else
{
insertStatement.setObject(i, object, type);
}
}
insertStatement.addBatch();
insertCount += 1;
}
else // if (compare == 0)
{
updateStatement.clearParameters();
boolean updated = false;
for (int i = primaryKeyColumnList.size() + 1; i <= columnList.size(); ++i)
{
Object activeObject = activeResultSet.getObject(i);
Object inactiveObject = inactiveResultSet.getObject(i);
int type = dialect.getColumnType(table.getColumn(columnList.get(i - 1)));
int index = i - primaryKeyColumnList.size();
if (activeResultSet.wasNull())
{
updateStatement.setNull(index, type);
updated |= !inactiveResultSet.wasNull();
}
else
{
updateStatement.setObject(index, activeObject, type);
updated |= inactiveResultSet.wasNull();
updated |= !equals(activeObject, inactiveObject);
}
}
if (updated)
{
for (int i = 1; i <= primaryKeyColumnList.size(); ++i)
{
int type = dialect.getColumnType(table.getColumn(columnList.get(i - 1)));
updateStatement.setObject(i + nonPrimaryKeyColumnList.size(), inactiveResultSet.getObject(i), type);
}
updateStatement.addBatch();
updateCount += 1;
}
}
if (hasMoreActiveResults && (compare <= 0))
{
hasMoreActiveResults = activeResultSet.next();
}
if (hasMoreInactiveResults && (compare >= 0))
{
hasMoreInactiveResults = inactiveResultSet.next();
}
}
if (deleteCount > 0)
{
deleteStatement.executeBatch();
}
deleteStatement.close();
if (insertCount > 0)
{
insertStatement.executeBatch();
}
insertStatement.close();
if (updateCount > 0)
{
updateStatement.executeBatch();
}
updateStatement.close();
inactiveStatement.close();
activeStatement.close();
// Collect unique constraints on this table from the active database and re-create them on the inactive database
for (UniqueConstraint constraint: constraints)
{
String sql = dialect.getCreateUniqueConstraintSQL(constraint);
logger.debug(sql);
statement.addBatch(sql);
}
statement.executeBatch();
statement.clearBatch();
inactiveConnection.commit();
logger.info(Messages.getMessage(Messages.INSERT_COUNT, insertCount, table.getName()));
logger.info(Messages.getMessage(Messages.UPDATE_COUNT, updateCount, table.getName()));
logger.info(Messages.getMessage(Messages.DELETE_COUNT, deleteCount, table.getName()));
}
}
catch (ExecutionException e)
{
this.rollback(inactiveConnection);
throw new net.sf.hajdbc.SQLException(e.getCause());
}
catch (InterruptedException e)
{
this.rollback(inactiveConnection);
throw new net.sf.hajdbc.SQLException(e);
}
catch (SQLException e)
{
this.rollback(inactiveConnection);
throw e;
}
inactiveConnection.setAutoCommit(true);
// Collect foreign key constraints from the active database and create them on the inactive database
for (TableProperties table: tables)
{
for (ForeignKeyConstraint constraint: table.getForeignKeyConstraints())
{
String sql = dialect.getCreateForeignKeyConstraintSQL(constraint);
logger.debug(sql);
statement.addBatch(sql);
}
}
statement.executeBatch();
statement.clearBatch();
Map<String, Long> activeSequenceMap = dialect.getSequences(activeConnection);
Map<String, Long> inactiveSequenceMap = dialect.getSequences(inactiveConnection);
for (String sequence: activeSequenceMap.keySet())
{
long activeValue = activeSequenceMap.get(sequence);
long inactiveValue = inactiveSequenceMap.get(sequence);
if (activeValue != inactiveValue)
{
String sql = dialect.getAlterSequenceSQL(sequence, activeValue);
logger.debug(sql);
statement.addBatch(sql);
}
}
statement.executeBatch();
statement.close();
}
|
diff --git a/src/main/java/org/sqlitejdbcng/SqliteDatabaseMetadata.java b/src/main/java/org/sqlitejdbcng/SqliteDatabaseMetadata.java
index f3d9350..a0fcd66 100644
--- a/src/main/java/org/sqlitejdbcng/SqliteDatabaseMetadata.java
+++ b/src/main/java/org/sqlitejdbcng/SqliteDatabaseMetadata.java
@@ -1,1378 +1,1378 @@
/*
* Copyright (c) 2013, Timothy Stack
*
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHORS AND CONTRIBUTORS ''AS IS'' AND ANY
* EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE REGENTS OR CONTRIBUTORS BE LIABLE FOR ANY
* DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
* ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.sqlitejdbcng;
import org.sqlitejdbcng.bridj.Sqlite3;
import org.sqlitejdbcng.internal.ColumnData;
import org.sqlitejdbcng.internal.SQLKeywords;
import java.sql.*;
import java.util.*;
public class SqliteDatabaseMetadata implements DatabaseMetaData {
    /**
     * Comma-separated list of keywords that SQLite reserves beyond the
     * standard SQL keyword set, as reported by {@link #getSQLKeywords()}.
     */
    private static final String KEYWORD_LIST;
    static {
        SQLKeywords keywords = new SQLKeywords();
        List<String> sqliteList = new ArrayList<>(Arrays.asList(keywords.getSqliteKeywords()));
        // Only SQLite-specific keywords should be reported, so subtract the standard SQL ones.
        sqliteList.removeAll(Arrays.asList(keywords.getSqlKeywords()));
        KEYWORD_LIST = Sqlite3.join(sqliteList.toArray(), ",");
    }
    /** The connection whose database this metadata object describes. */
    private final SqliteConnection conn;

    /**
     * Creates a metadata view backed by the given connection.
     *
     * @param conn the owning connection
     */
    public SqliteDatabaseMetadata(SqliteConnection conn) {
        this.conn = conn;
    }
@Override
public boolean allProceduresAreCallable() throws SQLException {
return false;
}
@Override
public boolean allTablesAreSelectable() throws SQLException {
return true;
}
@Override
public String getURL() throws SQLException {
return this.conn.getURL();
}
@Override
public String getUserName() throws SQLException {
return "";
}
@Override
public boolean isReadOnly() throws SQLException {
return this.conn.isReadOnly();
}
@Override
public boolean nullsAreSortedHigh() throws SQLException {
return false;
}
@Override
public boolean nullsAreSortedLow() throws SQLException {
return true;
}
@Override
public boolean nullsAreSortedAtStart() throws SQLException {
return false;
}
@Override
public boolean nullsAreSortedAtEnd() throws SQLException {
return false;
}
@Override
public String getDatabaseProductName() throws SQLException {
return "SQLite";
}
@Override
public String getDatabaseProductVersion() throws SQLException {
return Sqlite3.sqlite3_libversion().getCString();
}
@Override
public String getDriverName() throws SQLException {
return SqliteDriver.class.getPackage().getName();
}
@Override
public String getDriverVersion() throws SQLException {
return "" + SqliteDriver.VERSION[0] + "." + SqliteDriver.VERSION[1];
}
@Override
public int getDriverMajorVersion() {
return SqliteDriver.VERSION[0];
}
@Override
public int getDriverMinorVersion() {
return SqliteDriver.VERSION[1];
}
@Override
public boolean usesLocalFiles() throws SQLException {
return true;
}
@Override
public boolean usesLocalFilePerTable() throws SQLException {
return false;
}
@Override
public boolean supportsMixedCaseIdentifiers() throws SQLException {
return false;
}
@Override
public boolean storesUpperCaseIdentifiers() throws SQLException {
return false;
}
@Override
public boolean storesLowerCaseIdentifiers() throws SQLException {
return false;
}
@Override
public boolean storesMixedCaseIdentifiers() throws SQLException {
return true;
}
@Override
public boolean supportsMixedCaseQuotedIdentifiers() throws SQLException {
return false; //To change body of implemented methods use File | Settings | File Templates.
}
@Override
public boolean storesUpperCaseQuotedIdentifiers() throws SQLException {
return false; //To change body of implemented methods use File | Settings | File Templates.
}
@Override
public boolean storesLowerCaseQuotedIdentifiers() throws SQLException {
return false; //To change body of implemented methods use File | Settings | File Templates.
}
@Override
public boolean storesMixedCaseQuotedIdentifiers() throws SQLException {
return false; //To change body of implemented methods use File | Settings | File Templates.
}
@Override
public String getIdentifierQuoteString() throws SQLException {
return null; //To change body of implemented methods use File | Settings | File Templates.
}
@Override
public String getSQLKeywords() throws SQLException {
return KEYWORD_LIST;
}
@Override
public String getNumericFunctions() throws SQLException {
return null; //To change body of implemented methods use File | Settings | File Templates.
}
@Override
public String getStringFunctions() throws SQLException {
return null; //To change body of implemented methods use File | Settings | File Templates.
}
@Override
public String getSystemFunctions() throws SQLException {
return null; //To change body of implemented methods use File | Settings | File Templates.
}
@Override
public String getTimeDateFunctions() throws SQLException {
return null; //To change body of implemented methods use File | Settings | File Templates.
}
@Override
public String getSearchStringEscape() throws SQLException {
return null; //To change body of implemented methods use File | Settings | File Templates.
}
@Override
public String getExtraNameCharacters() throws SQLException {
return null; //To change body of implemented methods use File | Settings | File Templates.
}
@Override
public boolean supportsAlterTableWithAddColumn() throws SQLException {
return false; //To change body of implemented methods use File | Settings | File Templates.
}
@Override
public boolean supportsAlterTableWithDropColumn() throws SQLException {
return false; //To change body of implemented methods use File | Settings | File Templates.
}
@Override
public boolean supportsColumnAliasing() throws SQLException {
return true;
}
@Override
public boolean nullPlusNonNullIsNull() throws SQLException {
return true;
}
@Override
public boolean supportsConvert() throws SQLException {
return false; //To change body of implemented methods use File | Settings | File Templates.
}
@Override
public boolean supportsConvert(int i, int i2) throws SQLException {
return false; //To change body of implemented methods use File | Settings | File Templates.
}
@Override
public boolean supportsTableCorrelationNames() throws SQLException {
return false; //To change body of implemented methods use File | Settings | File Templates.
}
@Override
public boolean supportsDifferentTableCorrelationNames() throws SQLException {
return false; //To change body of implemented methods use File | Settings | File Templates.
}
@Override
public boolean supportsExpressionsInOrderBy() throws SQLException {
return true;
}
@Override
public boolean supportsOrderByUnrelated() throws SQLException {
return true;
}
@Override
public boolean supportsGroupBy() throws SQLException {
return true;
}
@Override
public boolean supportsGroupByUnrelated() throws SQLException {
return true;
}
@Override
public boolean supportsGroupByBeyondSelect() throws SQLException {
return true;
}
@Override
public boolean supportsLikeEscapeClause() throws SQLException {
return true;
}
@Override
public boolean supportsMultipleResultSets() throws SQLException {
return false;
}
@Override
public boolean supportsMultipleTransactions() throws SQLException {
return true;
}
@Override
public boolean supportsNonNullableColumns() throws SQLException {
return true;
}
@Override
public boolean supportsMinimumSQLGrammar() throws SQLException {
return false; //To change body of implemented methods use File | Settings | File Templates.
}
@Override
public boolean supportsCoreSQLGrammar() throws SQLException {
return false; //To change body of implemented methods use File | Settings | File Templates.
}
@Override
public boolean supportsExtendedSQLGrammar() throws SQLException {
return false; //To change body of implemented methods use File | Settings | File Templates.
}
@Override
public boolean supportsANSI92EntryLevelSQL() throws SQLException {
return false; //To change body of implemented methods use File | Settings | File Templates.
}
@Override
public boolean supportsANSI92IntermediateSQL() throws SQLException {
return false; //To change body of implemented methods use File | Settings | File Templates.
}
@Override
public boolean supportsANSI92FullSQL() throws SQLException {
return false; //To change body of implemented methods use File | Settings | File Templates.
}
@Override
public boolean supportsIntegrityEnhancementFacility() throws SQLException {
return false; //To change body of implemented methods use File | Settings | File Templates.
}
@Override
public boolean supportsOuterJoins() throws SQLException {
return false; //To change body of implemented methods use File | Settings | File Templates.
}
@Override
public boolean supportsFullOuterJoins() throws SQLException {
return false; //To change body of implemented methods use File | Settings | File Templates.
}
@Override
public boolean supportsLimitedOuterJoins() throws SQLException {
return false; //To change body of implemented methods use File | Settings | File Templates.
}
@Override
public String getSchemaTerm() throws SQLException {
return "";
}
@Override
public String getProcedureTerm() throws SQLException {
return "";
}
@Override
public String getCatalogTerm() throws SQLException {
return "database";
}
@Override
public boolean isCatalogAtStart() throws SQLException {
return true;
}
@Override
public String getCatalogSeparator() throws SQLException {
return ".";
}
@Override
public boolean supportsSchemasInDataManipulation() throws SQLException {
return false;
}
@Override
public boolean supportsSchemasInProcedureCalls() throws SQLException {
return false;
}
@Override
public boolean supportsSchemasInTableDefinitions() throws SQLException {
return false;
}
@Override
public boolean supportsSchemasInIndexDefinitions() throws SQLException {
return false;
}
@Override
public boolean supportsSchemasInPrivilegeDefinitions() throws SQLException {
return false;
}
@Override
public boolean supportsCatalogsInDataManipulation() throws SQLException {
return false; //To change body of implemented methods use File | Settings | File Templates.
}
@Override
public boolean supportsCatalogsInProcedureCalls() throws SQLException {
return false;
}
@Override
public boolean supportsCatalogsInTableDefinitions() throws SQLException {
return false; //To change body of implemented methods use File | Settings | File Templates.
}
@Override
public boolean supportsCatalogsInIndexDefinitions() throws SQLException {
return false; //To change body of implemented methods use File | Settings | File Templates.
}
@Override
public boolean supportsCatalogsInPrivilegeDefinitions() throws SQLException {
return false; //To change body of implemented methods use File | Settings | File Templates.
}
@Override
public boolean supportsPositionedDelete() throws SQLException {
return false; //To change body of implemented methods use File | Settings | File Templates.
}
@Override
public boolean supportsPositionedUpdate() throws SQLException {
return false; //To change body of implemented methods use File | Settings | File Templates.
}
@Override
public boolean supportsSelectForUpdate() throws SQLException {
return false;
}
@Override
public boolean supportsStoredProcedures() throws SQLException {
return false;
}
@Override
public boolean supportsSubqueriesInComparisons() throws SQLException {
return false; //To change body of implemented methods use File | Settings | File Templates.
}
@Override
public boolean supportsSubqueriesInExists() throws SQLException {
return false; //To change body of implemented methods use File | Settings | File Templates.
}
@Override
public boolean supportsSubqueriesInIns() throws SQLException {
return false; //To change body of implemented methods use File | Settings | File Templates.
}
@Override
public boolean supportsSubqueriesInQuantifieds() throws SQLException {
return false; //To change body of implemented methods use File | Settings | File Templates.
}
@Override
public boolean supportsCorrelatedSubqueries() throws SQLException {
return false; //To change body of implemented methods use File | Settings | File Templates.
}
@Override
public boolean supportsUnion() throws SQLException {
return true;
}
@Override
public boolean supportsUnionAll() throws SQLException {
return true;
}
@Override
public boolean supportsOpenCursorsAcrossCommit() throws SQLException {
return false; //To change body of implemented methods use File | Settings | File Templates.
}
@Override
public boolean supportsOpenCursorsAcrossRollback() throws SQLException {
return false; //To change body of implemented methods use File | Settings | File Templates.
}
@Override
public boolean supportsOpenStatementsAcrossCommit() throws SQLException {
return false; //To change body of implemented methods use File | Settings | File Templates.
}
@Override
public boolean supportsOpenStatementsAcrossRollback() throws SQLException {
return false; //To change body of implemented methods use File | Settings | File Templates.
}
@Override
public int getMaxBinaryLiteralLength() throws SQLException {
return Sqlite3.sqlite3_limit(this.conn.getHandle(), Sqlite3.Limit.SQLITE_LIMIT_LENGTH.value(), -1);
}
@Override
public int getMaxCharLiteralLength() throws SQLException {
return Sqlite3.sqlite3_limit(this.conn.getHandle(), Sqlite3.Limit.SQLITE_LIMIT_LENGTH.value(), -1);
}
@Override
public int getMaxColumnNameLength() throws SQLException {
return 0;
}
@Override
public int getMaxColumnsInGroupBy() throws SQLException {
return Sqlite3.sqlite3_limit(this.conn.getHandle(), Sqlite3.Limit.SQLITE_LIMIT_COLUMN.value(), -1);
}
@Override
public int getMaxColumnsInIndex() throws SQLException {
return Sqlite3.sqlite3_limit(this.conn.getHandle(), Sqlite3.Limit.SQLITE_LIMIT_COLUMN.value(), -1);
}
@Override
public int getMaxColumnsInOrderBy() throws SQLException {
return Sqlite3.sqlite3_limit(this.conn.getHandle(), Sqlite3.Limit.SQLITE_LIMIT_COLUMN.value(), -1);
}
@Override
public int getMaxColumnsInSelect() throws SQLException {
return Sqlite3.sqlite3_limit(this.conn.getHandle(), Sqlite3.Limit.SQLITE_LIMIT_COLUMN.value(), -1);
}
@Override
public int getMaxColumnsInTable() throws SQLException {
return Sqlite3.sqlite3_limit(this.conn.getHandle(), Sqlite3.Limit.SQLITE_LIMIT_COLUMN.value(), -1);
}
@Override
public int getMaxConnections() throws SQLException {
return 0;
}
@Override
public int getMaxCursorNameLength() throws SQLException {
return 0;
}
@Override
public int getMaxIndexLength() throws SQLException {
return 0;
}
@Override
public int getMaxSchemaNameLength() throws SQLException {
return 0;
}
@Override
public int getMaxProcedureNameLength() throws SQLException {
return 0;
}
@Override
public int getMaxCatalogNameLength() throws SQLException {
return 0;
}
@Override
public int getMaxRowSize() throws SQLException {
return Sqlite3.sqlite3_limit(this.conn.getHandle(), Sqlite3.Limit.SQLITE_LIMIT_LENGTH.value(), -1);
}
@Override
public boolean doesMaxRowSizeIncludeBlobs() throws SQLException {
return true;
}
@Override
public int getMaxStatementLength() throws SQLException {
return Sqlite3.sqlite3_limit(this.conn.getHandle(), Sqlite3.Limit.SQLITE_LIMIT_SQL_LENGTH.value(), -1);
}
@Override
public int getMaxStatements() throws SQLException {
return 0;
}
@Override
public int getMaxTableNameLength() throws SQLException {
return 0;
}
@Override
public int getMaxTablesInSelect() throws SQLException {
return 64;
}
@Override
public int getMaxUserNameLength() throws SQLException {
return 0;
}
@Override
public int getDefaultTransactionIsolation() throws SQLException {
return 0; //To change body of implemented methods use File | Settings | File Templates.
}
@Override
public boolean supportsTransactions() throws SQLException {
return true;
}
@Override
public boolean supportsTransactionIsolationLevel(int i) throws SQLException {
return false; //To change body of implemented methods use File | Settings | File Templates.
}
@Override
public boolean supportsDataDefinitionAndDataManipulationTransactions() throws SQLException {
return false; //To change body of implemented methods use File | Settings | File Templates.
}
@Override
public boolean supportsDataManipulationTransactionsOnly() throws SQLException {
return false; //To change body of implemented methods use File | Settings | File Templates.
}
@Override
public boolean dataDefinitionCausesTransactionCommit() throws SQLException {
return false; //To change body of implemented methods use File | Settings | File Templates.
}
@Override
public boolean dataDefinitionIgnoredInTransactions() throws SQLException {
return false; //To change body of implemented methods use File | Settings | File Templates.
}
private ResultSet executeConstantQuery(String constantQuery) throws SQLException {
Statement stmt = this.conn.createStatement();
try {
stmt.closeOnCompletion();
return stmt.executeQuery(constantQuery);
}
catch (SQLException e) {
stmt.close();
throw e;
}
}
@Override
public ResultSet getProcedures(String s, String s2, String s3) throws SQLException {
return this.executeConstantQuery(
"SELECT null as PROCEDURE_CAT, null as PROCEDURE_SCHEM, null as PROCEDURE_NAME, " +
"null as RES1, null as RES2, null as RES3, null as REMARKS, " +
"null as PROCEDURE_TYPE, null as SPECIFIC_NAME LIMIT 0");
}
@Override
public ResultSet getProcedureColumns(String s, String s2, String s3, String s4) throws SQLException {
return this.executeConstantQuery(
"SELECT null as PROCEDURE_CAT, null as PROCEDURE_SCHEM, null as PROCEDURE_NAME, " +
"null as COLUMN_NAME, null as COLUMN_TYPE, null as DATA_TYPE, " +
"null as TYPE_NAME, null as PRECISION, null as LENGTH, null as SCALE, " +
"null as RADIX, null as NULLABLE, null as REMARKS, null as COLUMN_DEF, " +
"null as SQL_DATA_TYPE, null as SQL_DATETIME_SUB, null as CHAR_OCTET_LENGTH, " +
"null as ORDINAL_POSITION, null as IS_NULLABLE, null as SPECIFIC_NAME " +
"LIMIT 0");
}
    /** Table types returned when the caller passes a null types filter. */
    private static final String[] DEFAULT_TABLE_TYPES = { "TABLE", "VIEW" };

    /**
     * Lists tables and views from the given catalog's sqlite_master.
     * A null or empty catalog defaults to "main"; a non-empty schema pattern
     * is rejected since SQLite has no schemas.
     *
     * @param catalog the database name ("main" if null/empty)
     * @param schemaPattern must be null or empty
     * @param tableNamePattern LIKE pattern for table names; null means all
     * @param types table types to include; null means TABLE and VIEW
     */
    @Override
    public ResultSet getTables(String catalog,
                               String schemaPattern,
                               String tableNamePattern,
                               String[] types) throws SQLException {
        if (schemaPattern != null && !schemaPattern.isEmpty())
            throw new SQLFeatureNotSupportedException("SQLite does not support schemas");
        if (catalog == null || catalog.isEmpty())
            catalog = "main";
        if (types == null) {
            types = DEFAULT_TABLE_TYPES;
        }
        // %Q quotes the catalog; %s expands to one '?' placeholder per requested type.
        String sql = Sqlite3.mprintf(
                "SELECT ? as TABLE_CAT, null as TABLE_SCHEM, name as TABLE_NAME, " +
                "upper(type) as TABLE_TYPE, sql as REMARKS, null as TYPE_CAT, " +
                "null as TYPE_SCHEM, null as TYPE_NAME, " +
                "\"row_id\" as SELF_REFERENCING_COL_NAME, " +
                "\"SYSTEM\" as REF_GENERATION FROM %Q.sqlite_master " +
                "WHERE name LIKE ? and upper(type) in (%s) " +
                "ORDER BY TABLE_TYPE, TABLE_CAT, TABLE_SCHEM, TABLE_NAME",
                catalog,
                Sqlite3.join(Collections.nCopies(types.length, "?").toArray(), ", "));
        PreparedStatement ps = this.conn.prepareStatement(sql);
        // Statement self-closes when the returned result set is closed.
        ps.closeOnCompletion();
        try {
            ps.setString(1, catalog);
            if (tableNamePattern == null)
                tableNamePattern = "%";
            ps.setString(2, tableNamePattern);
            // Type placeholders start at parameter index 3.
            for (int lpc = 0; lpc < types.length; lpc++) {
                ps.setString(3 + lpc, types[lpc]);
            }
            return ps.executeQuery();
        }
        catch (SQLException e) {
            ps.close();
            throw e;
        }
    }
@Override
public ResultSet getSchemas() throws SQLException {
return this.executeConstantQuery(
"SELECT null as TABLE_SCHEM, null as TABLE_CATALOG LIMIT 0");
}
    /**
     * Lists the attached databases as catalogs, one row per database,
     * using "PRAGMA database_list" to discover the names.
     */
    @Override
    public ResultSet getCatalogs() throws SQLException {
        try (Statement stmt = this.conn.createStatement()) {
            List<String> dbNames = new ArrayList<>();
            try (ResultSet rs = stmt.executeQuery("PRAGMA database_list")) {
                while (rs.next()) {
                    // Column 2 of database_list is the database name.
                    dbNames.add(rs.getString(2));
                }
            }
            // Build "SELECT ? as TABLE_CAT UNION ALL ..." with one arm per database.
            // NOTE(review): assumes database_list always returns at least one row
            // ("main"); an empty list would produce an empty SQL string.
            String query = Sqlite3.join(
                    Collections.nCopies(dbNames.size(), "SELECT ? as TABLE_CAT").toArray(),
                    " UNION ALL ");
            PreparedStatement preparedStatement = this.conn.prepareStatement(query);
            preparedStatement.closeOnCompletion();
            try {
                for (int lpc = 0; lpc < dbNames.size(); lpc++) {
                    preparedStatement.setString(lpc + 1, dbNames.get(lpc));
                }
                return preparedStatement.executeQuery();
            }
            catch (SQLException e) {
                preparedStatement.close();
                throw e;
            }
        }
    }
@Override
public ResultSet getTableTypes() throws SQLException {
return this.executeConstantQuery(
"SELECT 'TABLE' as TABLE_TYPE UNION ALL " +
"SELECT 'VIEW' as TABLE_TYPE");
}
    /**
     * One row of the getColumns() result in JDBC column order; contains 13
     * '?' placeholders bound per discovered column. Rows are combined with
     * UNION ALL by getColumns().
     */
    private static final String COLUMN_QUERY =
        "SELECT ? AS TABLE_CAT, null AS TABLE_SCHEM, ? AS TABLE_NAME, " +
        "? AS COLUMN_NAME, ? AS DATA_TYPE, ? AS TYPE_NAME, ? AS COLUMN_SIZE, " +
        "null AS BUFFER_LENGTH, ? AS DECIMAL_DIGITS, 10 AS NUM_PREC_RADIX, " +
        "? AS NULLABLE, '' AS REMARKS, ? AS COLUMN_DEF, null AS SQL_DATA_TYPE, " +
        "null AS SQL_DATETIME_SUB, ? AS ORDINAL_POSITION, ? AS IS_NULLABLE, " +
        "null AS SCOPE_CATALOG, null AS SCOPE_SCHEMA, null AS SCOPE_TABLE, " +
        "null AS SOURCE_DATA_TYPE, ? AS IS_AUTOINCREMENT, ? AS IS_GENERATEDCOLUMN ";
@Override
public ResultSet getColumns(String catalog,
String schemaPattern,
String tableNamePattern,
String columnNamePattern) throws SQLException {
List<String> tableList = new ArrayList<>();
String query;
/* XXX We should iterate over the catalogs instead of just defaulting to "main" */
- if (catalog == null)
+ if (catalog == null || catalog.isEmpty())
catalog = "main";
if (tableNamePattern == null)
tableNamePattern = "%";
if (columnNamePattern == null)
columnNamePattern = "%";
query = Sqlite3.mprintf("SELECT tbl_name FROM %Q.sqlite_master WHERE type='table' AND tbl_name LIKE ?",
catalog);
try (PreparedStatement ps = this.conn.prepareStatement(query)) {
ps.setString(1, tableNamePattern);
try (ResultSet rs = ps.executeQuery()) {
while (rs.next()) {
tableList.add(rs.getString(1));
}
}
}
List<ColumnData> columnList = new ArrayList<>();
columnNamePattern = columnNamePattern.replaceAll("%", ".*");
try (Statement stmt = this.conn.createStatement()) {
for (String tableName : tableList) {
query = Sqlite3.mprintf("PRAGMA %Q.table_info(%Q)", catalog, tableName);
try (ResultSet rs = stmt.executeQuery(query)) {
while (rs.next()) {
ColumnData cd = new ColumnData(this.conn.getHandle(), catalog, tableName, rs);
if (!cd.name.matches(columnNamePattern))
continue;
columnList.add(cd);
}
}
}
}
String constantQuery = "", limit = "";
for (int lpc = 0; lpc < columnList.size(); lpc++) {
if (!constantQuery.isEmpty())
constantQuery += " UNION ALL ";
constantQuery += COLUMN_QUERY;
}
if (constantQuery.isEmpty()) {
constantQuery = COLUMN_QUERY;
limit = " LIMIT 0";
}
constantQuery += " ORDER BY TABLE_CAT, TABLE_SCHEM, TABLE_NAME, ORDINAL_POSITION";
constantQuery += limit;
PreparedStatement ps = this.conn.prepareStatement(constantQuery);
ps.closeOnCompletion();
int index = 1;
for (ColumnData column : columnList) {
ps.setString(index++, catalog);
ps.setString(index++, column.tableName);
ps.setString(index++, column.name);
ps.setInt(index++, column.sqlType);
ps.setString(index++, column.type);
ps.setInt(index++, 0);
ps.setInt(index++, 0);
ps.setInt(index++, column.notNull);
ps.setString(index++, column.defaultValue);
ps.setInt(index++, column.index);
ps.setString(index++, column.notNull == columnNoNulls ? "NO" : "YES");
ps.setInt(index++, 0);
ps.setInt(index++, 0);
}
return ps.executeQuery();
}
@Override
public ResultSet getColumnPrivileges(String s, String s2, String s3, String s4) throws SQLException {
return this.executeConstantQuery(
"SELECT NULL AS TABLE_CAT, NULL AS TABLE_SCHEM, NULL AS TABLE_NAME, " +
"NULL AS COLUMN_NAME, NULL AS GRANTOR, NULL AS GRANTEE, " +
"NULL AS PRIVILEGE, NULL AS IS_GRANTABLE LIMIT 0"
);
}
@Override
public ResultSet getTablePrivileges(String s, String s2, String s3) throws SQLException {
return this.executeConstantQuery(
"SELECT NULL AS TABLE_CAT, NULL AS TABLE_SCHEM, NULL AS TABLE_NAME, " +
"NULL AS GRANTOR, NULL AS GRANTEE, " +
"NULL AS PRIVILEGE, NULL AS IS_GRANTABLE LIMIT 0"
);
}
@Override
public ResultSet getBestRowIdentifier(String s, String s2, String s3, int i, boolean b) throws SQLException {
return null; //To change body of implemented methods use File | Settings | File Templates.
}
/**
 * Not implemented for SQLite.
 *
 * BUG FIX: the original returned {@code null}, which violates the JDBC
 * contract.  Return an empty result set with the columns documented for
 * getVersionColumns instead.
 */
@Override
public ResultSet getVersionColumns(String s, String s2, String s3) throws SQLException {
    return this.executeConstantQuery(
        "SELECT NULL AS SCOPE, NULL AS COLUMN_NAME, NULL AS DATA_TYPE, " +
        "NULL AS TYPE_NAME, NULL AS COLUMN_SIZE, NULL AS BUFFER_LENGTH, " +
        "NULL AS DECIMAL_DIGITS, NULL AS PSEUDO_COLUMN LIMIT 0"
    );
}
/** One SELECT of constants per primary-key column; rows are UNIONed together. */
private static final String PRIMARY_KEY_QUERY =
    "SELECT ? AS TABLE_CAT, null AS TABLE_SCHEM, ? AS TABLE_NAME, ? AS COLUMN_NAME, " +
    "? AS KEY_SEQ, null AS PK_NAME ";

/**
 * Reports the primary-key columns of {@code tableName} by reading SQLite's
 * {@code table_info} pragma and replaying the matching columns through a
 * constant SELECT (one UNION ALL arm per column).
 */
@Override
public ResultSet getPrimaryKeys(String catalog, String schema, String tableName) throws SQLException {
    List<ColumnData> pkColumns = new ArrayList<>();

    try (Statement stmt = this.conn.createStatement()) {
        String pragma = (catalog != null)
            ? Sqlite3.mprintf("PRAGMA %Q.table_info(%Q)", catalog, tableName)
            : Sqlite3.mprintf("PRAGMA table_info(%Q)", tableName);
        try (ResultSet rs = stmt.executeQuery(pragma)) {
            while (rs.next()) {
                ColumnData cd = new ColumnData(this.conn.getHandle(), catalog, tableName, rs);
                // table_info reports a non-zero "pk" position for key columns
                if (cd.primaryKey != 0)
                    pkColumns.add(cd);
            }
        }
    }

    // Build one UNION ALL arm per key column; with no key columns, emit a
    // single arm constrained by LIMIT 0 so the result set is empty but typed.
    StringBuilder queryBuilder = new StringBuilder();
    for (int i = 0; i < pkColumns.size(); i += 1) {
        if (queryBuilder.length() > 0)
            queryBuilder.append(" UNION ALL ");
        queryBuilder.append(PRIMARY_KEY_QUERY);
    }
    boolean noKeys = (queryBuilder.length() == 0);
    if (noKeys)
        queryBuilder.append(PRIMARY_KEY_QUERY);
    queryBuilder.append(" ORDER BY COLUMN_NAME");
    if (noKeys)
        queryBuilder.append(" LIMIT 0");

    PreparedStatement ps = this.conn.prepareStatement(queryBuilder.toString());
    ps.closeOnCompletion();
    int index = 1;
    for (ColumnData cd : pkColumns) {
        ps.setString(index++, catalog);
        ps.setString(index++, tableName);
        ps.setString(index++, cd.name);
        ps.setInt(index++, cd.primaryKey);
    }
    return ps.executeQuery();
}
/**
 * Immutable value object describing one foreign-key relationship, as
 * reported by SQLite's {@code PRAGMA foreign_key_list}.
 */
public static class ForeignKeyData {
    /** Maps SQLite's conflict-action names onto the JDBC importedKey* codes. */
    private static final Map<String, Integer> ACTION_MAP = new HashMap<>();
    static {
        ACTION_MAP.put("NO ACTION", DatabaseMetaData.importedKeyNoAction);
        ACTION_MAP.put("RESTRICT", DatabaseMetaData.importedKeyRestrict);
        ACTION_MAP.put("CASCADE", DatabaseMetaData.importedKeyCascade);
        ACTION_MAP.put("SET DEFAULT", DatabaseMetaData.importedKeySetDefault);
        ACTION_MAP.put("SET NULL", DatabaseMetaData.importedKeySetNull);
    }

    /** Translates an action name to its JDBC code, failing loudly on unknown input. */
    private static final int actionStringToInt(String actionStr) {
        Integer code = ACTION_MAP.get(actionStr);
        if (code == null)
            throw new RuntimeException("Unknown sqlite action string " + actionStr);
        return code;
    }

    public final int id;             // foreign-key id within the declaring table
    public final int seq;            // column position within a compound key (0-based)
    public final String fromTable;   // table that declares the key
    public final String fromColumn;  // referencing column (null for placeholder entries)
    public final String toTable;     // table being referenced
    public final String toColumn;    // referenced column (null for placeholder entries)
    public final int onUpdate;       // JDBC importedKey* action code (-1 for placeholders)
    public final int onDelete;       // JDBC importedKey* action code (-1 for placeholders)
    public final String match;       // MATCH clause text as reported by the pragma

    /** Builds an entry from the current row of a {@code foreign_key_list} pragma. */
    public ForeignKeyData(String fromTable, ResultSet rs) throws SQLException {
        this.fromTable = fromTable;
        this.id = rs.getInt("id");
        this.seq = rs.getInt("seq");
        this.toTable = rs.getString("table");
        this.fromColumn = rs.getString("from");
        this.toColumn = rs.getString("to");
        this.onUpdate = actionStringToInt(rs.getString("on_update"));
        this.onDelete = actionStringToInt(rs.getString("on_delete"));
        this.match = rs.getString("match");
    }

    /** Placeholder used when only the pair of table names is known. */
    public ForeignKeyData(String fromTable, String toTable) {
        this.id = -1;
        this.seq = 0;
        this.fromTable = fromTable;
        this.fromColumn = null;
        this.toTable = toTable;
        this.toColumn = null;
        this.onUpdate = -1;
        this.onDelete = -1;
        this.match = null;
    }
}
/**
 * Collects every foreign-key relationship in the given catalog, indexed by
 * table name.  Each ForeignKeyData instance is registered under BOTH its
 * referencing (from) and referenced (to) table, so lookups by either side
 * find it.
 *
 * @param catalog the catalog to inspect, or {@code null} for the default
 * @return map from table name to the keys that touch that table
 */
private Map<String, List<ForeignKeyData>> getForeignKeyData(String catalog) throws SQLException {
    Map<String, List<ForeignKeyData>> table2Key = new HashMap<>();
    try (Statement stmt = this.conn.createStatement()) {
        List<String> allTables = new ArrayList<>();
        String tableQuery;
        /* XXX We need to iterate over all of the catalogs */
        if (catalog != null) {
            tableQuery = Sqlite3.mprintf(
                "SELECT name FROM %Q.sqlite_master WHERE type='table'", catalog);
        }
        else {
            tableQuery = "SELECT name FROM sqlite_master WHERE type='table'";
        }
        try (ResultSet rs = stmt.executeQuery(tableQuery)) {
            while (rs.next()) {
                allTables.add(rs.getString(1));
            }
        }
        for (String catalogTable : allTables) {
            // BUG FIX: the original always interpolated the catalog into the
            // pragma, which for a null catalog presumably yields a broken
            // "PRAGMA NULL.foreign_key_list(...)"; mirror the null-handling
            // used for tableQuery above.
            String fkQuery;
            if (catalog != null) {
                fkQuery = Sqlite3.mprintf("PRAGMA %Q.foreign_key_list(%Q)",
                        catalog, catalogTable);
            }
            else {
                fkQuery = Sqlite3.mprintf("PRAGMA foreign_key_list(%Q)", catalogTable);
            }
            try (ResultSet rs = stmt.executeQuery(fkQuery)) {
                while (rs.next()) {
                    ForeignKeyData fkd = new ForeignKeyData(catalogTable, rs);
                    // Index the key under both endpoints.
                    if (!table2Key.containsKey(fkd.fromTable))
                        table2Key.put(fkd.fromTable, new ArrayList<ForeignKeyData>());
                    if (!table2Key.containsKey(fkd.toTable))
                        table2Key.put(fkd.toTable, new ArrayList<ForeignKeyData>());
                    table2Key.get(fkd.fromTable).add(fkd);
                    table2Key.get(fkd.toTable).add(fkd);
                }
            }
        }
    }
    return table2Key;
}
/** One SELECT of constants per foreign-key column; rows are UNIONed together. */
private static final String FOREIGN_KEY_QUERY = "SELECT ? AS PKTABLE_CAT, " +
    "NULL AS PKTABLE_SCHEM, ? AS PKTABLE_NAME, " +
    "? AS PKCOLUMN_NAME, ? AS FKTABLE_CAT, NULL AS FKTABLE_SCHEM, " +
    "? AS FKTABLE_NAME, ? AS FKCOLUMN_NAME, ? AS KEY_SEQ, ? AS UPDATE_RULE, " +
    "? AS DELETE_RULE, NULL AS FK_NAME, NULL AS PK_NAME, ? AS DEFERRABILITY ";

/** True when the key links the requested tables; a null table means "any". */
private static boolean matchesForeignKeyRequest(ForeignKeyData fkd,
        String fromTable, String toTable) {
    if (fromTable != null && !fromTable.equals(fkd.fromTable))
        return false;
    return toTable == null || toTable.equals(fkd.toTable);
}

/**
 * Shared implementation behind getImportedKeys / getExportedKeys /
 * getCrossReference: selects the foreign keys declared by {@code fromTable}
 * (if non-null) that reference {@code toTable} (if non-null).
 */
private ResultSet getForeignKeys(String catalog, String fromTable, String toTable) throws SQLException {
    Map<String, List<ForeignKeyData>> table2Key = getForeignKeyData(catalog);

    // Gather the keys that satisfy the request.  getForeignKeyData() indexes
    // every key under both of its tables, so when fromTable and toTable are
    // both supplied the same instance can appear in both lists.
    // BUG FIX: the original appended both lists unfiltered and then applied
    // the filters twice, emitting such a key as two identical rows; the
    // identity-based contains() check below deduplicates.
    List<ForeignKeyData> matches = new ArrayList<>();
    if (table2Key.containsKey(fromTable)) {
        for (ForeignKeyData fkd : table2Key.get(fromTable)) {
            if (matchesForeignKeyRequest(fkd, fromTable, toTable) && !matches.contains(fkd))
                matches.add(fkd);
        }
    }
    if (table2Key.containsKey(toTable)) {
        for (ForeignKeyData fkd : table2Key.get(toTable)) {
            if (matchesForeignKeyRequest(fkd, fromTable, toTable) && !matches.contains(fkd))
                matches.add(fkd);
        }
    }

    // One UNION ALL arm per matching key; LIMIT 0 keeps the shape when empty.
    String constantQuery = "", limit = "";
    for (int lpc = 0; lpc < matches.size(); lpc++) {
        if (!constantQuery.isEmpty())
            constantQuery += " UNION ALL ";
        constantQuery += FOREIGN_KEY_QUERY;
    }
    if (constantQuery.isEmpty()) {
        constantQuery = FOREIGN_KEY_QUERY;
        limit = " LIMIT 0";
    }
    constantQuery += " ORDER BY PKTABLE_CAT, PKTABLE_SCHEM, PKTABLE_NAME, KEY_SEQ";
    constantQuery += limit;

    PreparedStatement ps = this.conn.prepareStatement(constantQuery);
    ps.closeOnCompletion();
    int index = 1;
    for (ForeignKeyData fkd : matches) {
        ps.setString(index++, catalog);
        ps.setString(index++, fkd.toTable);
        ps.setString(index++, fkd.toColumn);
        ps.setString(index++, catalog);
        ps.setString(index++, fkd.fromTable);
        ps.setString(index++, fkd.fromColumn);
        ps.setInt(index++, fkd.seq + 1);  // JDBC KEY_SEQ is 1-based
        ps.setInt(index++, fkd.onUpdate);
        ps.setInt(index++, fkd.onDelete);
        ps.setInt(index++, importedKeyInitiallyImmediate); // XXX
    }
    return ps.executeQuery();
}
/**
 * Returns the foreign keys declared by {@code table}, i.e. the keys it
 * imports from the tables it references.
 *
 * The DEFERRABILITY is always set to the value of 'importedKeyInitiallyImmediate'
 * since there is no way to tell at run time whether it is set to immediate or
 * deferred.
 *
 * {@inheritDoc}
 */
@Override
public ResultSet getImportedKeys(String catalog, String schema, String table) throws SQLException {
    return this.getForeignKeys(catalog, table, null);
}
/**
 * Returns the foreign keys in other tables that reference {@code table}
 * (the keys this table exports).  The schema argument is unused.
 *
 * {@inheritDoc}
 */
@Override
public ResultSet getExportedKeys(String catalog, String schema, String table) throws SQLException {
    return this.getForeignKeys(catalog, null, table);
}
/**
 * Returns the foreign keys in {@code foreignTable} that reference
 * {@code parentTable}.  Both tables must live in the same catalog.
 *
 * @throws SQLNonTransientException if the two catalog names differ
 */
@Override
public ResultSet getCrossReference(String parentCatalog,
                                   String parentSchema,
                                   String parentTable,
                                   String foreignCatalog,
                                   String foreignSchema,
                                   String foreignTable) throws SQLException {
    // BUG FIX: the original compared the catalog names with '!=', which
    // tests reference identity, not value equality -- two equal but
    // distinct String objects would be spuriously rejected.  Compare
    // null-safely by value instead.
    boolean sameCatalog = (parentCatalog == null)
        ? (foreignCatalog == null)
        : parentCatalog.equals(foreignCatalog);
    if (!sameCatalog)
        throw new SQLNonTransientException("Catalog names must be the same");
    return this.getForeignKeys(parentCatalog, foreignTable, parentTable);
}
/**
 * Not yet implemented.
 *
 * NOTE(review): returning null violates the JDBC contract, which expects a
 * (possibly empty) ResultSet describing the supported types; callers that
 * iterate the result will hit a NullPointerException.
 */
@Override
public ResultSet getTypeInfo() throws SQLException {
    return null; // TODO: return a type-description result set instead of null
}
/**
 * Not yet implemented.
 *
 * NOTE(review): returning null violates the JDBC contract (an empty
 * ResultSet is expected); callers iterating the result will hit a
 * NullPointerException.  SQLite's index_list/index_info pragmas could
 * back a real implementation.
 */
@Override
public ResultSet getIndexInfo(String s, String s2, String s3, boolean b, boolean b2) throws SQLException {
    return null; // TODO: return an empty result set instead of null
}
/** Only TYPE_FORWARD_ONLY result sets are supported. */
@Override
public boolean supportsResultSetType(int i) throws SQLException {
    return (i == ResultSet.TYPE_FORWARD_ONLY);
}

/** Only forward-only, read-only result sets are supported. */
@Override
public boolean supportsResultSetConcurrency(int type, int concurrency) throws SQLException {
    return (type == ResultSet.TYPE_FORWARD_ONLY && concurrency == ResultSet.CONCUR_READ_ONLY);
}

/** A result set never sees its own updates. */
@Override
public boolean ownUpdatesAreVisible(int i) throws SQLException {
    return false;
}

/** A result set never sees its own deletes. */
@Override
public boolean ownDeletesAreVisible(int i) throws SQLException {
    return false;
}

/** A result set never sees its own inserts. */
@Override
public boolean ownInsertsAreVisible(int i) throws SQLException {
    return false;
}

/** Updates made by others are not visible to an open result set. */
@Override
public boolean othersUpdatesAreVisible(int i) throws SQLException {
    return false;
}

/** Deletes made by others are not visible to an open result set. */
@Override
public boolean othersDeletesAreVisible(int i) throws SQLException {
    return false;
}

/** Inserts made by others are not visible to an open result set. */
@Override
public boolean othersInsertsAreVisible(int i) throws SQLException {
    return false;
}

/** Row updates cannot be detected via ResultSet.rowUpdated(). */
@Override
public boolean updatesAreDetected(int i) throws SQLException {
    return false;
}

/** Row deletes cannot be detected via ResultSet.rowDeleted(). */
@Override
public boolean deletesAreDetected(int i) throws SQLException {
    return false;
}

/** Row inserts cannot be detected via ResultSet.rowInserted(). */
@Override
public boolean insertsAreDetected(int i) throws SQLException {
    return false;
}

/** Batch updates (Statement.addBatch/executeBatch) are supported. */
@Override
public boolean supportsBatchUpdates() throws SQLException {
    return true;
}
/**
 * Always returns an empty result set with the JDBC-mandated UDT columns.
 */
@Override
public ResultSet getUDTs(String s, String s2, String s3, int[] ints) throws SQLException {
    String emptyQuery =
        "SELECT null as TYPE_CAT, null as TYPE_SCHEM, null as TYPE_NAME, " +
        "null as CLASS_NAME, null as DATA_TYPE, null as REMARKS, null as BASE_TYPE " +
        "LIMIT 0";
    return this.executeConstantQuery(emptyQuery);
}
/** Returns the connection that produced this metadata object. */
@Override
public Connection getConnection() throws SQLException {
    return this.conn;
}

/** Savepoints are supported. */
@Override
public boolean supportsSavepoints() throws SQLException {
    return true;
}

/** Named parameters to callable statements are not supported. */
@Override
public boolean supportsNamedParameters() throws SQLException {
    return false;
}

/** Multiple open ResultSets from a single execute call are not supported. */
@Override
public boolean supportsMultipleOpenResults() throws SQLException {
    return false;
}

/** Retrieval of auto-generated keys is not supported. */
@Override
public boolean supportsGetGeneratedKeys() throws SQLException {
    return false;
}
/**
 * BUG FIX: the original returned {@code null}, which violates the JDBC
 * contract.  Return an empty result set with the documented supertype
 * columns instead, consistent with getUDTs().
 */
@Override
public ResultSet getSuperTypes(String s, String s2, String s3) throws SQLException {
    return this.executeConstantQuery(
        "SELECT NULL AS TYPE_CAT, NULL AS TYPE_SCHEM, NULL AS TYPE_NAME, " +
        "NULL AS SUPERTYPE_CAT, NULL AS SUPERTYPE_SCHEM, NULL AS SUPERTYPE_NAME LIMIT 0"
    );
}
/**
 * BUG FIX: the original returned {@code null}, which violates the JDBC
 * contract.  Return an empty result set with the documented supertable
 * columns instead.
 */
@Override
public ResultSet getSuperTables(String s, String s2, String s3) throws SQLException {
    return this.executeConstantQuery(
        "SELECT NULL AS TABLE_CAT, NULL AS TABLE_SCHEM, NULL AS TABLE_NAME, " +
        "NULL AS SUPERTABLE_NAME LIMIT 0"
    );
}
/**
 * Always returns an empty result set with the JDBC-mandated UDT-attribute
 * columns.
 */
@Override
public ResultSet getAttributes(String s, String s2, String s3, String s4) throws SQLException {
    String emptyQuery =
        "SELECT null as TYPE_CAT, NULL AS TYPE_SCHEM, NULL AS TYPE_NAME, " +
        "NULL AS ATTR_NAME, NULL AS DATA_TYPE, NULL AS ATTR_TYPE_NAME, " +
        "NULL AS ATTR_SIZE, NULL AS DECIMAL_DIGITS, NULL AS NUM_PREC_RADIX, " +
        "NULL AS NULLABLE, NULL AS REMARKS, NULL AS ATTR_DEF, " +
        "NULL AS SQL_DATA_TYPE, NULL AS SQL_DATETIME_SUB, " +
        "NULL AS CHAR_OCTET_LENGTH, NULL AS ORDINAL_POSITION, " +
        "NULL AS IS_NULLABLE, NULL AS SCOPE_CATALOG, NULL AS SCOPE_SCHEMA, " +
        "NULL AS SCOPE_TABLE, NULL AS SOURCE_DATA_TYPE LIMIT 0";
    return this.executeConstantQuery(emptyQuery);
}
/** Only CLOSE_CURSORS_AT_COMMIT holdability is supported. */
@Override
public boolean supportsResultSetHoldability(int i) throws SQLException {
    return (i == ResultSet.CLOSE_CURSORS_AT_COMMIT);
}

/** Result sets are always closed when the transaction commits. */
@Override
public int getResultSetHoldability() throws SQLException {
    return ResultSet.CLOSE_CURSORS_AT_COMMIT;
}

/**
 * Major version of the linked SQLite library.  SQLITE_VERSION_NUMBER is
 * encoded as major*1000000 + minor*1000 + release.
 */
@Override
public int getDatabaseMajorVersion() throws SQLException {
    int version = Sqlite3.sqlite3_libversion_number();
    return version / 1000000;
}

/** Minor version of the linked SQLite library (see getDatabaseMajorVersion). */
@Override
public int getDatabaseMinorVersion() throws SQLException {
    int version = Sqlite3.sqlite3_libversion_number();
    return (version / 1000) % 1000;
}

/** This driver targets JDBC 4.0. */
@Override
public int getJDBCMajorVersion() throws SQLException {
    return 4;
}

/** This driver targets JDBC 4.0. */
@Override
public int getJDBCMinorVersion() throws SQLException {
    return 0;
}
/**
 * BUG FIX: the original returned the bare literal 0, which is not one of
 * the legal DatabaseMetaData sqlState* constants (sqlStateXOpen = 1,
 * sqlStateSQL = 2).  Report sqlStateSQL, the JDBC 4 convention, to match
 * the JDBC version this driver advertises.
 * NOTE(review): confirm the SQLStates this driver actually emits follow
 * the SQL standard convention.
 */
@Override
public int getSQLStateType() throws SQLException {
    return sqlStateSQL;
}
/** Updates apply directly to the LOB rather than to a copy. */
@Override
public boolean locatorsUpdateCopy() throws SQLException {
    return false;
}

/** Statement pooling is not supported. */
@Override
public boolean supportsStatementPooling() throws SQLException {
    return false;
}
/**
 * BUG FIX: the original returned {@code null}; the JDBC contract expects a
 * RowIdLifetime enum value.  Nothing in this driver exposes java.sql.RowId
 * values, so ROWID_UNSUPPORTED is the safe, honest answer.
 */
@Override
public RowIdLifetime getRowIdLifetime() throws SQLException {
    return RowIdLifetime.ROWID_UNSUPPORTED;
}
/**
 * BUG FIX: the original returned {@code null}, which violates the JDBC
 * contract.  Return an empty result set with the documented schema columns
 * instead.
 */
@Override
public ResultSet getSchemas(String s, String s2) throws SQLException {
    return this.executeConstantQuery(
        "SELECT NULL AS TABLE_SCHEM, NULL AS TABLE_CATALOG LIMIT 0"
    );
}
/** User/vendor functions cannot be invoked via the JDBC call escape syntax. */
@Override
public boolean supportsStoredFunctionsUsingCallSyntax() throws SQLException {
    return false;
}

/** A failure during autoCommit does not close all open result sets. */
@Override
public boolean autoCommitFailureClosesAllResultSets() throws SQLException {
    return false;
}
/** No client-info properties are supported; always returns an empty result set. */
@Override
public ResultSet getClientInfoProperties() throws SQLException {
    String emptyQuery =
        "SELECT '' AS NAME, 0 as MAX_LEN, '' as DEFAULT_VALUE, '' as DESCRIPTION LIMIT 0";
    return this.executeConstantQuery(emptyQuery);
}
/**
 * BUG FIX: the original returned {@code null}, which violates the JDBC
 * contract.  Return an empty result set with the documented function
 * columns instead.
 */
@Override
public ResultSet getFunctions(String s, String s2, String s3) throws SQLException {
    return this.executeConstantQuery(
        "SELECT NULL AS FUNCTION_CAT, NULL AS FUNCTION_SCHEM, NULL AS FUNCTION_NAME, " +
        "NULL AS REMARKS, NULL AS FUNCTION_TYPE, NULL AS SPECIFIC_NAME LIMIT 0"
    );
}
/**
 * Not yet implemented.
 * NOTE(review): returning null violates the JDBC contract (an empty
 * ResultSet is expected); callers iterating the result will hit a
 * NullPointerException.
 */
@Override
public ResultSet getFunctionColumns(String s, String s2, String s3, String s4) throws SQLException {
    return null; // TODO: return an empty result set instead of null
}

/**
 * Not yet implemented.
 * NOTE(review): returning null violates the JDBC contract (an empty
 * ResultSet is expected); callers iterating the result will hit a
 * NullPointerException.
 */
@Override
public ResultSet getPseudoColumns(String catalog, String schemaPattern, String tableNamePattern, String columnNamePattern) throws SQLException {
    return null; // TODO: return an empty result set instead of null
}

/** Generated keys are not guaranteed to be returned after an insert. */
@Override
public boolean generatedKeyAlwaysReturned() throws SQLException {
    return false;
}
/**
 * BUG FIX: the original returned {@code null}, violating the
 * java.sql.Wrapper contract -- unwrap must either return an object
 * implementing the requested interface or throw SQLException.
 */
@Override
public <T> T unwrap(Class<T> tClass) throws SQLException {
    if (tClass != null && tClass.isInstance(this))
        return tClass.cast(this);
    throw new SQLException("No object found that implements " + tClass);
}
/**
 * BUG FIX: the original unconditionally returned false, which is
 * inconsistent with the java.sql.Wrapper contract -- this object trivially
 * "wraps" any interface it implements itself (e.g. DatabaseMetaData).
 */
@Override
public boolean isWrapperFor(Class<?> aClass) throws SQLException {
    return aClass != null && aClass.isInstance(this);
}
}
| true | true | public ResultSet getColumns(String catalog,
String schemaPattern,
String tableNamePattern,
String columnNamePattern) throws SQLException {
List<String> tableList = new ArrayList<>();
String query;
/* XXX We should iterate over the catalogs instead of just defaulting to "main" */
if (catalog == null)
catalog = "main";
if (tableNamePattern == null)
tableNamePattern = "%";
if (columnNamePattern == null)
columnNamePattern = "%";
query = Sqlite3.mprintf("SELECT tbl_name FROM %Q.sqlite_master WHERE type='table' AND tbl_name LIKE ?",
catalog);
try (PreparedStatement ps = this.conn.prepareStatement(query)) {
ps.setString(1, tableNamePattern);
try (ResultSet rs = ps.executeQuery()) {
while (rs.next()) {
tableList.add(rs.getString(1));
}
}
}
List<ColumnData> columnList = new ArrayList<>();
columnNamePattern = columnNamePattern.replaceAll("%", ".*");
try (Statement stmt = this.conn.createStatement()) {
for (String tableName : tableList) {
query = Sqlite3.mprintf("PRAGMA %Q.table_info(%Q)", catalog, tableName);
try (ResultSet rs = stmt.executeQuery(query)) {
while (rs.next()) {
ColumnData cd = new ColumnData(this.conn.getHandle(), catalog, tableName, rs);
if (!cd.name.matches(columnNamePattern))
continue;
columnList.add(cd);
}
}
}
}
String constantQuery = "", limit = "";
for (int lpc = 0; lpc < columnList.size(); lpc++) {
if (!constantQuery.isEmpty())
constantQuery += " UNION ALL ";
constantQuery += COLUMN_QUERY;
}
if (constantQuery.isEmpty()) {
constantQuery = COLUMN_QUERY;
limit = " LIMIT 0";
}
constantQuery += " ORDER BY TABLE_CAT, TABLE_SCHEM, TABLE_NAME, ORDINAL_POSITION";
constantQuery += limit;
PreparedStatement ps = this.conn.prepareStatement(constantQuery);
ps.closeOnCompletion();
int index = 1;
for (ColumnData column : columnList) {
ps.setString(index++, catalog);
ps.setString(index++, column.tableName);
ps.setString(index++, column.name);
ps.setInt(index++, column.sqlType);
ps.setString(index++, column.type);
ps.setInt(index++, 0);
ps.setInt(index++, 0);
ps.setInt(index++, column.notNull);
ps.setString(index++, column.defaultValue);
ps.setInt(index++, column.index);
ps.setString(index++, column.notNull == columnNoNulls ? "NO" : "YES");
ps.setInt(index++, 0);
ps.setInt(index++, 0);
}
return ps.executeQuery();
}
| public ResultSet getColumns(String catalog,
String schemaPattern,
String tableNamePattern,
String columnNamePattern) throws SQLException {
List<String> tableList = new ArrayList<>();
String query;
/* XXX We should iterate over the catalogs instead of just defaulting to "main" */
if (catalog == null || catalog.isEmpty())
catalog = "main";
if (tableNamePattern == null)
tableNamePattern = "%";
if (columnNamePattern == null)
columnNamePattern = "%";
query = Sqlite3.mprintf("SELECT tbl_name FROM %Q.sqlite_master WHERE type='table' AND tbl_name LIKE ?",
catalog);
try (PreparedStatement ps = this.conn.prepareStatement(query)) {
ps.setString(1, tableNamePattern);
try (ResultSet rs = ps.executeQuery()) {
while (rs.next()) {
tableList.add(rs.getString(1));
}
}
}
List<ColumnData> columnList = new ArrayList<>();
columnNamePattern = columnNamePattern.replaceAll("%", ".*");
try (Statement stmt = this.conn.createStatement()) {
for (String tableName : tableList) {
query = Sqlite3.mprintf("PRAGMA %Q.table_info(%Q)", catalog, tableName);
try (ResultSet rs = stmt.executeQuery(query)) {
while (rs.next()) {
ColumnData cd = new ColumnData(this.conn.getHandle(), catalog, tableName, rs);
if (!cd.name.matches(columnNamePattern))
continue;
columnList.add(cd);
}
}
}
}
String constantQuery = "", limit = "";
for (int lpc = 0; lpc < columnList.size(); lpc++) {
if (!constantQuery.isEmpty())
constantQuery += " UNION ALL ";
constantQuery += COLUMN_QUERY;
}
if (constantQuery.isEmpty()) {
constantQuery = COLUMN_QUERY;
limit = " LIMIT 0";
}
constantQuery += " ORDER BY TABLE_CAT, TABLE_SCHEM, TABLE_NAME, ORDINAL_POSITION";
constantQuery += limit;
PreparedStatement ps = this.conn.prepareStatement(constantQuery);
ps.closeOnCompletion();
int index = 1;
for (ColumnData column : columnList) {
ps.setString(index++, catalog);
ps.setString(index++, column.tableName);
ps.setString(index++, column.name);
ps.setInt(index++, column.sqlType);
ps.setString(index++, column.type);
ps.setInt(index++, 0);
ps.setInt(index++, 0);
ps.setInt(index++, column.notNull);
ps.setString(index++, column.defaultValue);
ps.setInt(index++, column.index);
ps.setString(index++, column.notNull == columnNoNulls ? "NO" : "YES");
ps.setInt(index++, 0);
ps.setInt(index++, 0);
}
return ps.executeQuery();
}
|
diff --git a/src/minecraft/adanaran/mods/bfr/gui/GUIMill.java b/src/minecraft/adanaran/mods/bfr/gui/GUIMill.java
index 2f7123e..7dc92af 100644
--- a/src/minecraft/adanaran/mods/bfr/gui/GUIMill.java
+++ b/src/minecraft/adanaran/mods/bfr/gui/GUIMill.java
@@ -1,56 +1,56 @@
package adanaran.mods.bfr.gui;
import org.lwjgl.opengl.GL11;
import adanaran.mods.bfr.entities.TileEntityMill;
import adanaran.mods.bfr.entities.TileEntityStove;
import adanaran.mods.bfr.inventory.ContainerMill;
import adanaran.mods.bfr.inventory.ContainerStove;
import net.minecraft.client.gui.inventory.GuiContainer;
import net.minecraft.client.resources.I18n;
import net.minecraft.entity.player.InventoryPlayer;
import net.minecraft.util.ResourceLocation;
import net.minecraft.world.World;
/**
* @author Demitreus
*
*/
public class GUIMill extends GuiContainer {
private TileEntityMill millInventory;
public static final ResourceLocation guiTexture = new ResourceLocation("bfr","gui/container/mill.png");
public GUIMill(InventoryPlayer invPlayer, TileEntityMill tileEntityMill, World world){
super(new ContainerMill(invPlayer, tileEntityMill, world));
this.millInventory = tileEntityMill;
}
/**
* Draw the foreground layer for the GuiContainer (everything in front of the items)
*/
protected void drawGuiContainerForegroundLayer(int par1, int par2)
{
String s = this.millInventory.isInvNameLocalized() ? this.millInventory.getInvName() : I18n.func_135053_a(this.millInventory.getInvName());
this.fontRenderer.drawString(s, this.xSize / 2 - this.fontRenderer.getStringWidth(s) / 2, 6, 4210752);
this.fontRenderer.drawString(I18n.func_135053_a("container.inventory"), 8, this.ySize - 96 + 2, 4210752);
}
@Override
protected void drawGuiContainerBackgroundLayer(float f, int i, int j) {
GL11.glColor4f(1.0F, 1.0F, 1.0F, 1.0F);
this.mc.func_110434_K().func_110577_a(guiTexture);
int k = (this.width - this.xSize) / 2;
int l = (this.height - this.ySize) / 2;
this.drawTexturedModalRect(k, l, 0, 0, this.xSize, this.ySize);
int i1;
if(millInventory.isMilling()){
- this.drawTexturedModalRect(k + 9, l + 48 , 176, 31, 32, 32);
+ this.drawTexturedModalRect(k+12, l+16, 176, 31, 33, 33);
}
//position of progressbar
i1 = millInventory.getMillProgressScaled(24);
- this.drawTexturedModalRect(k + 89, l + 34, 176, 14, i1 + 1, 16);
+ this.drawTexturedModalRect(k + 79, l + 34, 176, 14, i1 + 1, 16);
}
}
| false | true | protected void drawGuiContainerBackgroundLayer(float f, int i, int j) {
GL11.glColor4f(1.0F, 1.0F, 1.0F, 1.0F);
this.mc.func_110434_K().func_110577_a(guiTexture);
int k = (this.width - this.xSize) / 2;
int l = (this.height - this.ySize) / 2;
this.drawTexturedModalRect(k, l, 0, 0, this.xSize, this.ySize);
int i1;
if(millInventory.isMilling()){
this.drawTexturedModalRect(k + 9, l + 48 , 176, 31, 32, 32);
}
//position of progressbar
i1 = millInventory.getMillProgressScaled(24);
this.drawTexturedModalRect(k + 89, l + 34, 176, 14, i1 + 1, 16);
}
| protected void drawGuiContainerBackgroundLayer(float f, int i, int j) {
GL11.glColor4f(1.0F, 1.0F, 1.0F, 1.0F);
this.mc.func_110434_K().func_110577_a(guiTexture);
int k = (this.width - this.xSize) / 2;
int l = (this.height - this.ySize) / 2;
this.drawTexturedModalRect(k, l, 0, 0, this.xSize, this.ySize);
int i1;
if(millInventory.isMilling()){
this.drawTexturedModalRect(k+12, l+16, 176, 31, 33, 33);
}
//position of progressbar
i1 = millInventory.getMillProgressScaled(24);
this.drawTexturedModalRect(k + 79, l + 34, 176, 14, i1 + 1, 16);
}
|
diff --git a/src/org/eclipse/imp/lpg/wizards/NewLanguageSupportWizard.java b/src/org/eclipse/imp/lpg/wizards/NewLanguageSupportWizard.java
index 1c1a158..226f763 100644
--- a/src/org/eclipse/imp/lpg/wizards/NewLanguageSupportWizard.java
+++ b/src/org/eclipse/imp/lpg/wizards/NewLanguageSupportWizard.java
@@ -1,228 +1,228 @@
/*
* (C) Copyright IBM Corporation 2007
*
* This file is part of the Eclipse IMP.
*/
package org.eclipse.imp.lpg.wizards;
import java.io.IOException;
import java.net.URL;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.eclipse.core.resources.IFile;
import org.eclipse.core.resources.IProject;
import org.eclipse.core.runtime.CoreException;
import org.eclipse.core.runtime.FileLocator;
import org.eclipse.core.runtime.IProgressMonitor;
import org.eclipse.core.runtime.IStatus;
import org.eclipse.core.runtime.NullProgressMonitor;
import org.eclipse.core.runtime.Path;
import org.eclipse.core.runtime.Platform;
import org.eclipse.core.runtime.Status;
import org.eclipse.imp.core.ErrorHandler;
import org.eclipse.imp.lpg.LPGPlugin;
import org.eclipse.imp.lpg.LPGRuntimePlugin;
import org.eclipse.imp.lpg.preferences.LPGPreferencesDialogConstants;
import org.eclipse.imp.preferences.IPreferencesService;
import org.eclipse.imp.preferences.PreferencesService;
import org.eclipse.imp.runtime.RuntimePlugin;
import org.eclipse.imp.wizards.GeneratedComponentWizard;
import org.eclipse.jface.viewers.IStructuredSelection;
import org.eclipse.ui.IWorkbench;
import org.eclipse.ui.IWorkbenchWizard;
import org.osgi.framework.Bundle;
public class NewLanguageSupportWizard extends GeneratedComponentWizard //ExtensionPointWizard
{
protected GrammarOptions fGrammarOptions;
protected String fGrammarFileName;
protected String fLexerFileName;
protected String fKwlexerFileName;
protected String fControllerFileName;
protected String fLocatorFileName;
static final String astDirectory= "Ast";
static final String astNode= "ASTNode";
static final String sAutoGenTemplate= "%options parent_saved,automatic_ast=toplevel,visitor=preorder,ast_directory=./" + astDirectory
+ ",ast_type=" + astNode;
static final String sKeywordTemplate= "%options filter=kwTemplate.gi";
private final static List<String /*pluginID*/> dependencies= new ArrayList<String>();
static {
dependencies.add(RuntimePlugin.IMP_RUNTIME);
dependencies.add("org.eclipse.core.runtime");
dependencies.add("org.eclipse.core.resources");
dependencies.add("org.eclipse.imp.runtime");
dependencies.add("lpg.runtime");
}
protected List getPluginDependencies() {
return dependencies;
}
@Override
protected void generateCodeStubs(IProgressMonitor mon) throws CoreException {
// TODO Auto-generated method stub
}
protected static String getTemplateBundleID() {
return LPGPlugin.kPluginID;
}
/**
* We will accept the selection in the workbench to see if
* we can initialize from it.
* @see IWorkbenchWizard#init(IWorkbench, IStructuredSelection)
*/
public void init(IWorkbench workbench, IStructuredSelection selection) {
// this.selection = selection;
}
protected Map<String,String> getStandardSubstitutions() {
Map<String,String> result= new HashMap<String,String>();
result.put("$LANG_NAME$", fLanguageName);
result.put("$CLASS_NAME_PREFIX$", fClassNamePrefix);
result.put("$PACKAGE_NAME$", fPackageName);
return result;
}
protected String fFileNamePrefix = null;
protected void setFileNamePrefix() {
String projectLocation = fProject.getLocation().toString();
fFileNamePrefix = projectLocation + '/' + getProjectSourceLocation() + fPackageName.replace('.', '/') + '/';
}
protected String getFileNamePrefix() {
if (fFileNamePrefix == null) {
setFileNamePrefix();
}
return fFileNamePrefix;
}
protected IFile createParseController(
String fileName, String templateName, boolean hasKeywords, IProject project, IProgressMonitor monitor)
throws CoreException
{
// Note: Not all substitution parameters may be used in all templates
Map<String,String> subs= getStandardSubstitutions();
subs.put("$AST_PKG_NODE$", fPackageName + "." + astDirectory + "." + astNode);
subs.put("$AST_NODE$", astNode);
subs.put("$PARSER_TYPE$", fClassNamePrefix + "Parser");
subs.put("$LEXER_TYPE$", fClassNamePrefix + "Lexer");
// SMS 9 Sep 2007
// Added parameter for plugin id to take advantage of an alternative
// form of createFileFromTemplate
// (Did the same for similar invocations in other methods)
return createFileFromTemplate(fileName, LPGPlugin.kPluginID, templateName, fPackageFolder, subs, project, monitor);
}
protected IFile createNodeLocator(
String fileName, String templateName, IProject project, IProgressMonitor monitor) throws CoreException
{
// Note: Not all substitution parameters may be used in all templates
Map<String,String> subs= getStandardSubstitutions();
subs.put("$AST_PKG_NODE$", fPackageName + "." + astDirectory + "." + astNode);
subs.put("$AST_NODE$", astNode);
subs.put("$PARSER_TYPE$", fClassNamePrefix + "Parser");
subs.put("$LEXER_TYPE$", fClassNamePrefix + "Lexer");
return createFileFromTemplate(fileName, LPGPlugin.kPluginID, templateName, fPackageFolder, subs, project, monitor);
}
protected IFile createKWLexer(String fileName, String templateName,
boolean hasKeywords, IProject project, IProgressMonitor monitor) throws CoreException
{
Map<String,String> subs= getStandardSubstitutions();
subs.put("$TEMPLATE$", templateName);
String kwLexerTemplateName = "kwlexer.gi";
return createFileFromTemplate(fileName, LPGPlugin.kPluginID, kwLexerTemplateName, fPackageFolder, subs, project, monitor);
}
protected IFile createLexer(String fileName, String templateName,
boolean hasKeywords, IProject project, IProgressMonitor monitor) throws CoreException
{
Map<String,String> subs= getStandardSubstitutions();
subs.put("$TEMPLATE$", templateName);
subs.put("$KEYWORD_FILTER$",
hasKeywords ? ("%options filter=" + fClassNamePrefix + "KWLexer.gi") : "");
subs.put("$KEYWORD_LEXER$", hasKeywords ? ("$" + fClassNamePrefix + "KWLexer") : "Object");
subs.put("$LEXER_MAP$", (hasKeywords ? "LexerBasicMap" : "LexerVeryBasicMap"));
String lexerTemplateName = "lexer.gi";
return createFileFromTemplate(fileName, LPGPlugin.kPluginID, lexerTemplateName, fPackageFolder, subs, project, monitor);
}
protected IFile createGrammar(String fileName, String templateName,
boolean autoGenerateASTs, IProject project, IProgressMonitor monitor) throws CoreException
{
Map<String,String> subs= getStandardSubstitutions();
subs.put("$AUTO_GENERATE$", autoGenerateASTs ? sAutoGenTemplate : "");
subs.put("$TEMPLATE$", templateName);
String grammarTemplateFileName = "grammar.g";
return createFileFromTemplate(fileName, LPGPlugin.kPluginID, grammarTemplateFileName, fPackageFolder, subs, project, monitor);
}
// Adapted from GeneratedComponentWizard
/**
* This method is called when 'Finish' button is pressed in the wizard.
* We will create an operation and run it using wizard as execution context.
*
* This method is quite a bit simpler than the corresponding method for
* ExtensionPointWizard since no extensions have to be created here.
*/
public boolean performFinish()
{
// Do this in the UI thread while the wizard fields are still accessible
try {
collectCodeParms();
} catch (IllegalArgumentException e) {
// Exception might be thrown if selected package is not acceptable
//ErrorHandler.reportError("NewLPGGrammarWizard.performFinish: Could not collect parameters for stubs", e);
return false;
}
// Invoke after collectCodeParms() so that collectCodeParms()
// can collect the names of files from the wizard
if (!okToClobberFiles(getFilesThatCouldBeClobbered()))
return false;
// Do we need to do just this in a runnable? Evidently not.
try {
generateCodeStubs(new NullProgressMonitor());
} catch (Exception e){
- ErrorHandler.reportError("NewLPGGrammarWizard.performFinish: Could not generate code stubs", e);
+ ErrorHandler.reportError("NewLanguageSupportrWizard.performFinish: Could not generate code stubs", e);
return false;
}
return true;
}
protected void setIncludeDirPreference() {
String lpgIncDirKey= LPGPreferencesDialogConstants.P_INCLUDEPATHTOUSE;
Bundle lpgMetaToolingBundle= Platform.getBundle(LPGPlugin.kPluginID);
URL templateDirURL= FileLocator.find(lpgMetaToolingBundle, new Path("/templates"), null);
try {
String lpgTemplatesDir= FileLocator.toFileURL(templateDirURL).getPath();
if (Platform.getOS().equals(Platform.OS_WIN32)) {
if (lpgTemplatesDir.startsWith("/")) {
lpgTemplatesDir = lpgTemplatesDir.substring(1); }
}
IPreferencesService ps= new PreferencesService(fProject);
ps.setLanguageName(LPGRuntimePlugin.getLanguageID());
ps.setStringPreference(IPreferencesService.PROJECT_LEVEL, lpgIncDirKey, lpgTemplatesDir);
ps.setBooleanPreference(IPreferencesService.PROJECT_LEVEL, LPGPreferencesDialogConstants.P_USEDEFAULTINCLUDEPATH, false);
} catch (IOException e) {
LPGPlugin.getInstance().getLog().log(new Status(IStatus.ERROR, LPGPlugin.kPluginID, 0, "Unable to resolve 'templates' directory in LPG metatooling plugin", null));
}
}
}
| true | true | public boolean performFinish()
{
// Collects the wizard's code-generation parameters, confirms any file
// overwriting with the user, then generates the code stubs.
// Returns true to let the wizard close, false to keep it open.
// Do this in the UI thread while the wizard fields are still accessible
try {
collectCodeParms();
} catch (IllegalArgumentException e) {
// Exception might be thrown if selected package is not acceptable;
// the finish is vetoed silently in that case (no error dialog).
//ErrorHandler.reportError("NewLPGGrammarWizard.performFinish: Could not collect parameters for stubs", e);
return false;
}
// Invoke after collectCodeParms() so that collectCodeParms()
// can collect the names of files from the wizard
if (!okToClobberFiles(getFilesThatCouldBeClobbered()))
return false;
// Do we need to do just this in a runnable? Evidently not.
try {
generateCodeStubs(new NullProgressMonitor());
} catch (Exception e){
ErrorHandler.reportError("NewLPGGrammarWizard.performFinish: Could not generate code stubs", e);
return false;
}
return true;
}
| public boolean performFinish()
{
// Collects the wizard's code-generation parameters, confirms any file
// overwriting with the user, then generates the code stubs.
// Returns true to let the wizard close, false to keep it open.
// Do this in the UI thread while the wizard fields are still accessible
try {
collectCodeParms();
} catch (IllegalArgumentException e) {
// Exception might be thrown if selected package is not acceptable;
// the finish is vetoed silently in that case (no error dialog).
//ErrorHandler.reportError("NewLPGGrammarWizard.performFinish: Could not collect parameters for stubs", e);
return false;
}
// Invoke after collectCodeParms() so that collectCodeParms()
// can collect the names of files from the wizard
if (!okToClobberFiles(getFilesThatCouldBeClobbered()))
return false;
// Do we need to do just this in a runnable? Evidently not.
try {
generateCodeStubs(new NullProgressMonitor());
} catch (Exception e){
// Fixed typo in the diagnostic tag: "NewLanguageSupportrWizard" -> "NewLanguageSupportWizard"
ErrorHandler.reportError("NewLanguageSupportWizard.performFinish: Could not generate code stubs", e);
return false;
}
return true;
}
|
diff --git a/kernel-impl/src/main/java/org/sakaiproject/log/impl/Log4jConfigurationManager.java b/kernel-impl/src/main/java/org/sakaiproject/log/impl/Log4jConfigurationManager.java
index e80e67bc..310b5606 100644
--- a/kernel-impl/src/main/java/org/sakaiproject/log/impl/Log4jConfigurationManager.java
+++ b/kernel-impl/src/main/java/org/sakaiproject/log/impl/Log4jConfigurationManager.java
@@ -1,379 +1,379 @@
/**********************************************************************************
* $URL$
* $Id$
***********************************************************************************
*
* Copyright (c) 2006, 2008 Sakai Foundation
*
* Licensed under the Educational Community License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.osedu.org/licenses/ECL-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
**********************************************************************************/
package org.sakaiproject.log.impl;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import java.util.Set;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.log4j.Appender;
import org.apache.log4j.Layout;
import org.apache.log4j.Level;
import org.apache.log4j.Logger;
import org.apache.log4j.spi.ErrorHandler;
import org.apache.log4j.spi.Filter;
import org.apache.log4j.spi.LoggingEvent;
import org.sakaiproject.authz.api.SecurityService;
import org.sakaiproject.component.api.ServerConfigurationService;
import org.sakaiproject.log.api.LogConfigurationManager;
import org.sakaiproject.log.api.LogPermissionException;
import org.sakaiproject.util.StringUtil;
/**
* <p>
* Log4jConfigurationManager lets us configure the log4j system with overrides from sakai.properties. Someday it might even have a service API for other fun things!
* </p>
*/
public abstract class Log4jConfigurationManager implements LogConfigurationManager
{
/** Our log (commons). */
private static Log M_log = LogFactory.getLog(Log4jConfigurationManager.class);
/**********************************************************************************************************************************************************************************************************************************************************
* Dependencies
*********************************************************************************************************************************************************************************************************************************************************/
/**
* @return the UsageSessionService collaborator.
*/
protected abstract ServerConfigurationService serverConfigurationService();
/**
* @return the SecurityService collaborator.
*/
protected abstract SecurityService securityService();
/**********************************************************************************************************************************************************************************************************************************************************
* Configuration
*********************************************************************************************************************************************************************************************************************************************************/
/** Configuration: enable special log handling or not. */
protected boolean m_enabled = true;
/**
* Configuration: enable special log handling or not.
*
* @param value
* the setting (true of false) for enabled.
*/
public void setEnabled(String value)
{
m_enabled = Boolean.valueOf(value).booleanValue();
}
/** Map by logger name of set of message string starts to ignore. */
protected Map m_ignore = new HashMap();
public void setIgnore(Map ignore)
{
m_ignore = ignore;
}
/**********************************************************************************************************************************************************************************************************************************************************
* Init and Destroy
*********************************************************************************************************************************************************************************************************************************************************/
/**
* Final initialization, once all dependencies are set.
*/
public void init()
{
if (m_enabled)
{
// slip in our appender
Appender a = Logger.getRootLogger().getAppender("Sakai");
if (a != null)
{
Logger.getRootLogger().removeAppender(a);
Logger.getRootLogger().addAppender(new SakaiAppender(a));
}
// set the log4j logging system with some overrides from sakai.properties
// each in the form LEVEL.NAME where LEVEL is OFF | TRACE | DEBUG | INFO | WARN | ERROR | FATAL | ALL, name is the logger name (such as org.sakaiproject)
// example:
// log.config.count=3
// log.config.1 = ALL.org.sakaiproject.log.impl
// log.config.2 = OFF.org.sakaiproject
// log.config.3 = DEBUG.org.sakaiproject.db.impl
String configs[] = serverConfigurationService().getStrings("log.config");
if (configs != null)
{
for (int i = 0; i < configs.length; i++)
{
String parts[] = StringUtil.splitFirst(configs[i], ".");
if ((parts != null) && (parts.length == 2))
{
doSetLogLevel(parts[0], parts[1]);
}
else
{
M_log.warn("invalid log.config entry: ignoring: " + configs[i]);
}
}
}
}
M_log.info("init(): enabled: " + m_enabled);
}
/**
* Final cleanup.
*/
public void destroy()
{
M_log.info("destroy()");
}
/**
* Set the log level
*
* @param level
* The log level string - one of OFF | TRACE | DEBUG | INFO | WARN | ERROR | FATAL | ALL
* @param loggerName
* The logger name.
*/
protected boolean doSetLogLevel(String level, String loggerName)
{
if (level.equals("OFF"))
{
Logger logger = Logger.getLogger(loggerName);
if (logger != null)
{
logger.setLevel(org.apache.log4j.Level.OFF);
M_log.info("OFF logging for: " + loggerName);
}
else
{
M_log.warn("no logger found: ignoring: " + loggerName);
}
}
else if (level.equals("TRACE"))
{
// Note: log4j has nothing below debug
Logger logger = Logger.getLogger(loggerName);
if (logger != null)
{
logger.setLevel(org.apache.log4j.Level.DEBUG);
M_log.info("TRACE (DEBUG) logging for: " + loggerName);
}
else
{
M_log.warn("no logger found: ignoring: " + loggerName);
}
}
else if (level.equals("DEBUG"))
{
Logger logger = Logger.getLogger(loggerName);
if (logger != null)
{
logger.setLevel(org.apache.log4j.Level.DEBUG);
M_log.info("DEBUG logging for: " + loggerName);
}
else
{
M_log.warn("no logger found: ignoring: " + loggerName);
}
}
else if (level.equals("INFO"))
{
Logger logger = Logger.getLogger(loggerName);
if (logger != null)
{
logger.setLevel(org.apache.log4j.Level.INFO);
M_log.info("INFO logging for: " + loggerName);
}
else
{
M_log.warn("no logger found: ignoring: " + loggerName);
}
}
else if (level.equals("WARN"))
{
Logger logger = Logger.getLogger(loggerName);
if (logger != null)
{
logger.setLevel(org.apache.log4j.Level.WARN);
M_log.info("WARN logging for: " + loggerName);
}
else
{
M_log.warn("no logger found: ignoring: " + loggerName);
}
}
else if (level.equals("ERROR"))
{
Logger logger = Logger.getLogger(loggerName);
if (logger != null)
{
logger.setLevel(org.apache.log4j.Level.ERROR);
M_log.info("ERROR logging for: " + loggerName);
}
else
{
M_log.warn("no logger found: ignoring: " + loggerName);
}
}
else if (level.equals("FATAL"))
{
Logger logger = Logger.getLogger(loggerName);
if (logger != null)
{
logger.setLevel(org.apache.log4j.Level.FATAL);
M_log.info("FATAL logging for: " + loggerName);
}
else
{
M_log.warn("no logger found: ignoring: " + loggerName);
}
}
else if (level.equals("ALL"))
{
Logger logger = Logger.getLogger(loggerName);
if (logger != null)
{
logger.setLevel(org.apache.log4j.Level.ALL);
M_log.info("ALL logging for: " + loggerName);
}
else
{
M_log.warn("no logger found: ignoring: " + loggerName);
}
}
else
{
- M_log.warn("invalid log level: ignorning: " + level);
+ M_log.warn("invalid log level: ignoring: " + level);
return false;
}
return true;
}
/**********************************************************************************************************************************************************************************************************************************************************
* Work interface methods: LogConfigurationManager
*********************************************************************************************************************************************************************************************************************************************************/
/**
* {@inheritDoc}
*/
public boolean setLogLevel(String level, String loggerName) throws LogPermissionException
{
// check that this is a "super" user with the security service
if (!securityService().isSuperUser())
{
throw new LogPermissionException();
}
return doSetLogLevel(level, loggerName);
}
/**********************************************************************************************************************************************************************************************************************************************************
* Our special Appender
*********************************************************************************************************************************************************************************************************************************************************/
class SakaiAppender implements org.apache.log4j.Appender
{
protected Appender m_other = null;
public SakaiAppender(Appender other)
{
m_other = other;
}
public void addFilter(Filter arg0)
{
m_other.addFilter(arg0);
}
public void clearFilters()
{
m_other.clearFilters();
}
public void close()
{
m_other.close();
}
public void doAppend(LoggingEvent arg0)
{
String logger = arg0.getLoggerName();
String message = arg0.getRenderedMessage();
Level level = arg0.getLevel();
Set toIgnore = (Set) m_ignore.get(logger);
if (toIgnore != null)
{
// if any of the strings in the set start our message, skip it
for (Iterator i = toIgnore.iterator(); i.hasNext();)
{
String start = (String) i.next();
if (message.startsWith(start)) return;
}
}
m_other.doAppend(arg0);
}
public ErrorHandler getErrorHandler()
{
return m_other.getErrorHandler();
}
public Filter getFilter()
{
return m_other.getFilter();
}
public Layout getLayout()
{
return m_other.getLayout();
}
public String getName()
{
return m_other.getName();
}
public boolean requiresLayout()
{
return m_other.requiresLayout();
}
public void setErrorHandler(ErrorHandler arg0)
{
m_other.setErrorHandler(arg0);
}
public void setLayout(Layout arg0)
{
m_other.setLayout(arg0);
}
public void setName(String arg0)
{
m_other.setName(arg0);
}
}
}
| true | true | protected boolean doSetLogLevel(String level, String loggerName)
{
if (level.equals("OFF"))
{
Logger logger = Logger.getLogger(loggerName);
if (logger != null)
{
logger.setLevel(org.apache.log4j.Level.OFF);
M_log.info("OFF logging for: " + loggerName);
}
else
{
M_log.warn("no logger found: ignoring: " + loggerName);
}
}
else if (level.equals("TRACE"))
{
// Note: log4j has nothing below debug
Logger logger = Logger.getLogger(loggerName);
if (logger != null)
{
logger.setLevel(org.apache.log4j.Level.DEBUG);
M_log.info("TRACE (DEBUG) logging for: " + loggerName);
}
else
{
M_log.warn("no logger found: ignoring: " + loggerName);
}
}
else if (level.equals("DEBUG"))
{
Logger logger = Logger.getLogger(loggerName);
if (logger != null)
{
logger.setLevel(org.apache.log4j.Level.DEBUG);
M_log.info("DEBUG logging for: " + loggerName);
}
else
{
M_log.warn("no logger found: ignoring: " + loggerName);
}
}
else if (level.equals("INFO"))
{
Logger logger = Logger.getLogger(loggerName);
if (logger != null)
{
logger.setLevel(org.apache.log4j.Level.INFO);
M_log.info("INFO logging for: " + loggerName);
}
else
{
M_log.warn("no logger found: ignoring: " + loggerName);
}
}
else if (level.equals("WARN"))
{
Logger logger = Logger.getLogger(loggerName);
if (logger != null)
{
logger.setLevel(org.apache.log4j.Level.WARN);
M_log.info("WARN logging for: " + loggerName);
}
else
{
M_log.warn("no logger found: ignoring: " + loggerName);
}
}
else if (level.equals("ERROR"))
{
Logger logger = Logger.getLogger(loggerName);
if (logger != null)
{
logger.setLevel(org.apache.log4j.Level.ERROR);
M_log.info("ERROR logging for: " + loggerName);
}
else
{
M_log.warn("no logger found: ignoring: " + loggerName);
}
}
else if (level.equals("FATAL"))
{
Logger logger = Logger.getLogger(loggerName);
if (logger != null)
{
logger.setLevel(org.apache.log4j.Level.FATAL);
M_log.info("FATAL logging for: " + loggerName);
}
else
{
M_log.warn("no logger found: ignoring: " + loggerName);
}
}
else if (level.equals("ALL"))
{
Logger logger = Logger.getLogger(loggerName);
if (logger != null)
{
logger.setLevel(org.apache.log4j.Level.ALL);
M_log.info("ALL logging for: " + loggerName);
}
else
{
M_log.warn("no logger found: ignoring: " + loggerName);
}
}
else
{
M_log.warn("invalid log level: ignorning: " + level);
return false;
}
return true;
}
| protected boolean doSetLogLevel(String level, String loggerName)
{
if (level.equals("OFF"))
{
Logger logger = Logger.getLogger(loggerName);
if (logger != null)
{
logger.setLevel(org.apache.log4j.Level.OFF);
M_log.info("OFF logging for: " + loggerName);
}
else
{
M_log.warn("no logger found: ignoring: " + loggerName);
}
}
else if (level.equals("TRACE"))
{
// Note: log4j has nothing below debug
Logger logger = Logger.getLogger(loggerName);
if (logger != null)
{
logger.setLevel(org.apache.log4j.Level.DEBUG);
M_log.info("TRACE (DEBUG) logging for: " + loggerName);
}
else
{
M_log.warn("no logger found: ignoring: " + loggerName);
}
}
else if (level.equals("DEBUG"))
{
Logger logger = Logger.getLogger(loggerName);
if (logger != null)
{
logger.setLevel(org.apache.log4j.Level.DEBUG);
M_log.info("DEBUG logging for: " + loggerName);
}
else
{
M_log.warn("no logger found: ignoring: " + loggerName);
}
}
else if (level.equals("INFO"))
{
Logger logger = Logger.getLogger(loggerName);
if (logger != null)
{
logger.setLevel(org.apache.log4j.Level.INFO);
M_log.info("INFO logging for: " + loggerName);
}
else
{
M_log.warn("no logger found: ignoring: " + loggerName);
}
}
else if (level.equals("WARN"))
{
Logger logger = Logger.getLogger(loggerName);
if (logger != null)
{
logger.setLevel(org.apache.log4j.Level.WARN);
M_log.info("WARN logging for: " + loggerName);
}
else
{
M_log.warn("no logger found: ignoring: " + loggerName);
}
}
else if (level.equals("ERROR"))
{
Logger logger = Logger.getLogger(loggerName);
if (logger != null)
{
logger.setLevel(org.apache.log4j.Level.ERROR);
M_log.info("ERROR logging for: " + loggerName);
}
else
{
M_log.warn("no logger found: ignoring: " + loggerName);
}
}
else if (level.equals("FATAL"))
{
Logger logger = Logger.getLogger(loggerName);
if (logger != null)
{
logger.setLevel(org.apache.log4j.Level.FATAL);
M_log.info("FATAL logging for: " + loggerName);
}
else
{
M_log.warn("no logger found: ignoring: " + loggerName);
}
}
else if (level.equals("ALL"))
{
Logger logger = Logger.getLogger(loggerName);
if (logger != null)
{
logger.setLevel(org.apache.log4j.Level.ALL);
M_log.info("ALL logging for: " + loggerName);
}
else
{
M_log.warn("no logger found: ignoring: " + loggerName);
}
}
else
{
M_log.warn("invalid log level: ignoring: " + level);
return false;
}
return true;
}
|
diff --git a/src/main/java/org/spout/vanilla/protocol/codec/PlayerBlockPlacementCodec.java b/src/main/java/org/spout/vanilla/protocol/codec/PlayerBlockPlacementCodec.java
index 9a3c12a1..81c42e21 100644
--- a/src/main/java/org/spout/vanilla/protocol/codec/PlayerBlockPlacementCodec.java
+++ b/src/main/java/org/spout/vanilla/protocol/codec/PlayerBlockPlacementCodec.java
@@ -1,97 +1,95 @@
/*
* This file is part of Vanilla.
*
* Copyright (c) 2011-2012, VanillaDev <http://www.spout.org/>
* Vanilla is licensed under the SpoutDev License Version 1.
*
* Vanilla is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* In addition, 180 days after any changes are published, you can use the
* software, incorporating those changes, under the terms of the MIT license,
* as described in the SpoutDev License Version 1.
*
* Vanilla is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License,
* the MIT license and the SpoutDev License Version 1 along with this program.
* If not, see <http://www.gnu.org/licenses/> for the GNU Lesser General Public
* License and see <http://www.spout.org/SpoutDevLicenseV1.txt> for the full license,
* including the MIT license.
*/
package org.spout.vanilla.protocol.codec;
import java.io.IOException;
import org.jboss.netty.buffer.ChannelBuffer;
import org.jboss.netty.buffer.ChannelBuffers;
import org.spout.api.protocol.MessageCodec;
import org.spout.nbt.CompoundMap;
import org.spout.vanilla.protocol.ChannelBufferUtils;
import org.spout.vanilla.protocol.msg.PlayerBlockPlacementMessage;
public final class PlayerBlockPlacementCodec extends MessageCodec<PlayerBlockPlacementMessage> {
public PlayerBlockPlacementCodec() {
super(PlayerBlockPlacementMessage.class, 0x0F);
}
@Override
public PlayerBlockPlacementMessage decode(ChannelBuffer buffer) throws IOException {
int x = buffer.readInt();
int y = buffer.readUnsignedByte();
int z = buffer.readInt();
int direction = buffer.readUnsignedByte();
int count = 0;
int damage = 0;
CompoundMap nbtData = null;
int id = buffer.readUnsignedShort();
if (id != 0xFFFF) {
count = buffer.readUnsignedByte();
damage = buffer.readShort();
- if (ChannelBufferUtils.hasNbtData(id)) {
- nbtData = ChannelBufferUtils.readCompound(buffer);
- }
+ nbtData = ChannelBufferUtils.readCompound(buffer);
}
float dx = ((float) (buffer.readByte() & 0xFF)) / 16.0F;
float dy = ((float) (buffer.readByte() & 0xFF)) / 16.0F;
float dz = ((float) (buffer.readByte() & 0xFF)) / 16.0F;
return new PlayerBlockPlacementMessage(x, y, z, direction, id, count, damage, nbtData, dx, dy, dz);
}
@Override
public ChannelBuffer encode(PlayerBlockPlacementMessage message) throws IOException {
int id = message.getId();
ChannelBuffer buffer = ChannelBuffers.dynamicBuffer();
buffer.writeInt(message.getX());
buffer.writeByte(message.getY());
buffer.writeInt(message.getZ());
buffer.writeByte(message.getDirection());
buffer.writeShort(id);
if (id != -1) {
buffer.writeByte(message.getCount());
buffer.writeShort(message.getDamage());
if (ChannelBufferUtils.hasNbtData(id)) {
ChannelBufferUtils.writeCompound(buffer, message.getNbtData());
} else {
buffer.writeShort(-1);
}
}
buffer.writeByte((int) (message.getDX() * 16.0F));
buffer.writeByte((int) (message.getDY() * 16.0F));
buffer.writeByte((int) (message.getDZ() * 16.0F));
return buffer;
}
}
| true | true | public PlayerBlockPlacementMessage decode(ChannelBuffer buffer) throws IOException {
    // Wire order is significant: x(int), y(ubyte), z(int), direction(ubyte),
    // optional held-item payload, then the three packed cursor offsets.
    final int blockX = buffer.readInt();
    final int blockY = buffer.readUnsignedByte();
    final int blockZ = buffer.readInt();
    final int face = buffer.readUnsignedByte();
    int itemCount = 0;
    int itemDamage = 0;
    CompoundMap tag = null;
    final int itemId = buffer.readUnsignedShort();
    // 0xFFFF marks an empty hand: no count/damage/NBT payload follows.
    if (itemId != 0xFFFF) {
        itemCount = buffer.readUnsignedByte();
        itemDamage = buffer.readShort();
        if (ChannelBufferUtils.hasNbtData(itemId)) {
            tag = ChannelBufferUtils.readCompound(buffer);
        }
    }
    // Cursor position within the block face, one byte each in 1/16th units.
    final float cursorX = (buffer.readByte() & 0xFF) / 16.0F;
    final float cursorY = (buffer.readByte() & 0xFF) / 16.0F;
    final float cursorZ = (buffer.readByte() & 0xFF) / 16.0F;
    return new PlayerBlockPlacementMessage(blockX, blockY, blockZ, face, itemId, itemCount, itemDamage, tag, cursorX, cursorY, cursorZ);
}
| public PlayerBlockPlacementMessage decode(ChannelBuffer buffer) throws IOException {
    // Wire order is significant: x(int), y(ubyte), z(int), direction(ubyte),
    // optional held-item payload, then the three packed cursor offsets.
    final int blockX = buffer.readInt();
    final int blockY = buffer.readUnsignedByte();
    final int blockZ = buffer.readInt();
    final int face = buffer.readUnsignedByte();
    int itemCount = 0;
    int itemDamage = 0;
    CompoundMap tag = null;
    final int itemId = buffer.readUnsignedShort();
    // 0xFFFF marks an empty hand: no count/damage/NBT payload follows.
    if (itemId != 0xFFFF) {
        itemCount = buffer.readUnsignedByte();
        itemDamage = buffer.readShort();
        // NBT compound is always present for a non-empty item in this protocol revision.
        tag = ChannelBufferUtils.readCompound(buffer);
    }
    // Cursor position within the block face, one byte each in 1/16th units.
    final float cursorX = (buffer.readByte() & 0xFF) / 16.0F;
    final float cursorY = (buffer.readByte() & 0xFF) / 16.0F;
    final float cursorZ = (buffer.readByte() & 0xFF) / 16.0F;
    return new PlayerBlockPlacementMessage(blockX, blockY, blockZ, face, itemId, itemCount, itemDamage, tag, cursorX, cursorY, cursorZ);
}
|
diff --git a/modules/elasticsearch/src/main/java/org/elasticsearch/index/engine/robin/RobinEngine.java b/modules/elasticsearch/src/main/java/org/elasticsearch/index/engine/robin/RobinEngine.java
index fd34ab67825..cda00abf594 100644
--- a/modules/elasticsearch/src/main/java/org/elasticsearch/index/engine/robin/RobinEngine.java
+++ b/modules/elasticsearch/src/main/java/org/elasticsearch/index/engine/robin/RobinEngine.java
@@ -1,1340 +1,1352 @@
/*
* Licensed to Elastic Search and Shay Banon under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. Elastic Search licenses this
* file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.engine.robin;
import org.apache.lucene.index.ExtendedIndexSearcher;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.SegmentInfo;
import org.apache.lucene.index.SegmentInfos;
import org.apache.lucene.index.SegmentReader;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.FilteredQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.store.AlreadyClosedException;
import org.apache.lucene.util.UnicodeUtil;
import org.elasticsearch.ElasticSearchException;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.Preconditions;
import org.elasticsearch.common.Unicode;
import org.elasticsearch.common.bloom.BloomFilter;
import org.elasticsearch.common.collect.MapBuilder;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.lucene.Lucene;
import org.elasticsearch.common.lucene.ReaderSearcherHolder;
import org.elasticsearch.common.lucene.uid.UidField;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.ByteSizeUnit;
import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.util.concurrent.resource.AcquirableResource;
import org.elasticsearch.index.VersionType;
import org.elasticsearch.index.analysis.AnalysisService;
import org.elasticsearch.index.cache.bloom.BloomCache;
import org.elasticsearch.index.deletionpolicy.SnapshotDeletionPolicy;
import org.elasticsearch.index.deletionpolicy.SnapshotIndexCommit;
import org.elasticsearch.index.engine.*;
import org.elasticsearch.index.mapper.internal.UidFieldMapper;
import org.elasticsearch.index.merge.policy.EnableMergePolicy;
import org.elasticsearch.index.merge.policy.MergePolicyProvider;
import org.elasticsearch.index.merge.scheduler.MergeSchedulerProvider;
import org.elasticsearch.index.settings.IndexSettings;
import org.elasticsearch.index.settings.IndexSettingsService;
import org.elasticsearch.index.shard.AbstractIndexShardComponent;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.index.similarity.SimilarityService;
import org.elasticsearch.index.store.Store;
import org.elasticsearch.index.translog.Translog;
import org.elasticsearch.index.translog.TranslogStreams;
import org.elasticsearch.threadpool.ThreadPool;
import java.io.IOException;
import java.util.Arrays;
import java.util.Comparator;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.locks.ReadWriteLock;
import java.util.concurrent.locks.ReentrantReadWriteLock;
import static org.elasticsearch.common.lucene.Lucene.*;
import static org.elasticsearch.common.util.concurrent.resource.AcquirableResourceFactory.*;
/**
* @author kimchy (shay.banon)
*/
public class RobinEngine extends AbstractIndexShardComponent implements Engine {
private volatile ByteSizeValue indexingBufferSize;
private volatile int termIndexInterval;
private volatile int termIndexDivisor;
private volatile int indexConcurrency;
private final ReadWriteLock rwl = new ReentrantReadWriteLock();
private final AtomicBoolean optimizeMutex = new AtomicBoolean();
private long gcDeletesInMillis;
private final ThreadPool threadPool;
private final IndexSettingsService indexSettingsService;
private final Store store;
private final SnapshotDeletionPolicy deletionPolicy;
private final Translog translog;
private final MergePolicyProvider mergePolicyProvider;
private final MergeSchedulerProvider mergeScheduler;
private final AnalysisService analysisService;
private final SimilarityService similarityService;
private final BloomCache bloomCache;
private final boolean asyncLoadBloomFilter;
// no need for volatile, its always used under a lock
private IndexWriter indexWriter;
private volatile AcquirableResource<ReaderSearcherHolder> nrtResource;
private volatile boolean closed = false;
// flag indicating if a dirty operation has occurred since the last refresh
private volatile boolean dirty = false;
private volatile boolean possibleMergeNeeded = false;
// we use flushNeeded here, since if there are no changes, then the commit won't write
// will not really happen, and then the commitUserData and the new translog will not be reflected
private volatile boolean flushNeeded = false;
private volatile int disableFlushCounter = 0;
// indexing searcher is initialized
private final AtomicBoolean flushing = new AtomicBoolean();
private final ConcurrentMap<String, VersionValue> versionMap;
private final Object[] dirtyLocks;
private final Object refreshMutex = new Object();
private final ApplySettings applySettings = new ApplySettings();
private Throwable failedEngine = null;
private final Object failedEngineMutex = new Object();
private final CopyOnWriteArrayList<FailedEngineListener> failedEngineListeners = new CopyOnWriteArrayList<FailedEngineListener>();
private final AtomicLong translogIdGenerator = new AtomicLong();
private SegmentInfos lastCommittedSegmentInfos;
@Inject public RobinEngine(ShardId shardId, @IndexSettings Settings indexSettings, ThreadPool threadPool,
IndexSettingsService indexSettingsService,
Store store, SnapshotDeletionPolicy deletionPolicy, Translog translog,
MergePolicyProvider mergePolicyProvider, MergeSchedulerProvider mergeScheduler,
AnalysisService analysisService, SimilarityService similarityService,
BloomCache bloomCache) throws EngineException {
super(shardId, indexSettings);
Preconditions.checkNotNull(store, "Store must be provided to the engine");
Preconditions.checkNotNull(deletionPolicy, "Snapshot deletion policy must be provided to the engine");
Preconditions.checkNotNull(translog, "Translog must be provided to the engine");
this.gcDeletesInMillis = indexSettings.getAsTime("index.gc_deletes", TimeValue.timeValueSeconds(60)).millis();
this.indexingBufferSize = componentSettings.getAsBytesSize("index_buffer_size", new ByteSizeValue(64, ByteSizeUnit.MB)); // not really important, as it is set by the IndexingMemory manager
this.termIndexInterval = indexSettings.getAsInt("index.term_index_interval", IndexWriterConfig.DEFAULT_TERM_INDEX_INTERVAL);
this.termIndexDivisor = indexSettings.getAsInt("index.term_index_divisor", 1); // IndexReader#DEFAULT_TERMS_INDEX_DIVISOR
this.asyncLoadBloomFilter = componentSettings.getAsBoolean("async_load_bloom", true); // Here for testing, should always be true
this.threadPool = threadPool;
this.indexSettingsService = indexSettingsService;
this.store = store;
this.deletionPolicy = deletionPolicy;
this.translog = translog;
this.mergePolicyProvider = mergePolicyProvider;
this.mergeScheduler = mergeScheduler;
this.analysisService = analysisService;
this.similarityService = similarityService;
this.bloomCache = bloomCache;
this.indexConcurrency = indexSettings.getAsInt("index.index_concurrency", IndexWriterConfig.DEFAULT_MAX_THREAD_STATES);
this.versionMap = new ConcurrentHashMap<String, VersionValue>();
this.dirtyLocks = new Object[indexConcurrency * 10]; // we multiply it by 10 to have enough...
for (int i = 0; i < dirtyLocks.length; i++) {
dirtyLocks[i] = new Object();
}
this.indexSettingsService.addListener(applySettings);
}
@Override public void updateIndexingBufferSize(ByteSizeValue indexingBufferSize) {
ByteSizeValue preValue = this.indexingBufferSize;
rwl.readLock().lock();
try {
// LUCENE MONITOR - If this restriction is removed from Lucene, remove it from here
if (indexingBufferSize.mbFrac() > 2048.0) {
this.indexingBufferSize = new ByteSizeValue(2048, ByteSizeUnit.MB);
} else {
this.indexingBufferSize = indexingBufferSize;
}
IndexWriter indexWriter = this.indexWriter;
if (indexWriter != null) {
indexWriter.getConfig().setRAMBufferSizeMB(this.indexingBufferSize.mbFrac());
}
} finally {
rwl.readLock().unlock();
}
// its inactive, make sure we do a full flush in this case, since the memory
// changes only after a "data" change has happened to the writer
if (indexingBufferSize == Engine.INACTIVE_SHARD_INDEXING_BUFFER && preValue != Engine.INACTIVE_SHARD_INDEXING_BUFFER) {
try {
flush(new Flush().full(true));
} catch (Exception e) {
logger.warn("failed to flush after setting shard to inactive", e);
}
}
}
@Override public void addFailedEngineListener(FailedEngineListener listener) {
failedEngineListeners.add(listener);
}
// Starts the engine: creates the IndexWriter, resolves the translog id (read from the last
// Lucene commit's user data when present, otherwise freshly minted and committed), opens the
// initial NRT searcher resource and records the last committed segment infos.
// Throws EngineAlreadyStartedException if a writer already exists.
@Override public void start() throws EngineException {
    rwl.writeLock().lock();
    try {
        if (indexWriter != null) {
            throw new EngineAlreadyStartedException(shardId);
        }
        if (logger.isDebugEnabled()) {
            logger.debug("Starting engine");
        }
        try {
            this.indexWriter = createWriter();
        } catch (IOException e) {
            throw new EngineCreationFailureException(shardId, "Failed to create engine", e);
        }
        try {
            if (IndexReader.indexExists(store.directory())) {
                // existing index: reuse the translog id stored in the last commit, if any
                Map<String, String> commitUserData = IndexReader.getCommitUserData(store.directory());
                if (commitUserData.containsKey(Translog.TRANSLOG_ID_KEY)) {
                    translogIdGenerator.set(Long.parseLong(commitUserData.get(Translog.TRANSLOG_ID_KEY)));
                } else {
                    // index predates translog-id tracking: mint an id and commit it
                    translogIdGenerator.set(System.currentTimeMillis());
                    indexWriter.commit(MapBuilder.<String, String>newMapBuilder().put(Translog.TRANSLOG_ID_KEY, Long.toString(translogIdGenerator.get())).map());
                }
            } else {
                // fresh index: mint an id and record it in the very first commit
                translogIdGenerator.set(System.currentTimeMillis());
                indexWriter.commit(MapBuilder.<String, String>newMapBuilder().put(Translog.TRANSLOG_ID_KEY, Long.toString(translogIdGenerator.get())).map());
            }
            translog.newTranslog(translogIdGenerator.get());
            this.nrtResource = buildNrtResource(indexWriter);
            SegmentInfos infos = new SegmentInfos();
            infos.read(store.directory());
            lastCommittedSegmentInfos = infos;
        } catch (IOException e) {
            // failed to open a reader over the new writer: roll back, then close, then propagate
            try {
                indexWriter.rollback();
            } catch (IOException e1) {
                // ignore
            } finally {
                try {
                    indexWriter.close();
                } catch (IOException e1) {
                    // ignore
                }
            }
            throw new EngineCreationFailureException(shardId, "Failed to open reader on writer", e);
        }
    } finally {
        rwl.writeLock().unlock();
    }
}
// Default refresh interval used when the index does not configure one explicitly.
@Override public TimeValue defaultRefreshInterval() {
    return new TimeValue(1, TimeUnit.SECONDS);
}
// Realtime get. First consults the in-memory version map (populated on every write and
// pointing back into the translog); only when no live entry exists does it fall back to the
// Lucene index, using a per-segment bloom filter to skip segments that cannot contain the uid.
// When a document is found in the index, ownership of the acquired searcher transfers to the
// returned GetResult (the caller must release it); on all other paths the searcher is released here.
public GetResult get(Get get) throws EngineException {
    rwl.readLock().lock();
    try {
        if (get.realtime()) {
            VersionValue versionValue = versionMap.get(get.uid().text());
            if (versionValue != null) {
                if (versionValue.delete()) {
                    // latest known state is a delete
                    return GetResult.NOT_EXISTS;
                }
                if (!get.loadSource()) {
                    // caller only wants existence + version; no need to touch the translog
                    return new GetResult(true, versionValue.version(), null);
                }
                byte[] data = translog.read(versionValue.translogLocation());
                if (data != null) {
                    try {
                        Translog.Source source = TranslogStreams.readSource(data);
                        return new GetResult(true, versionValue.version(), source);
                    } catch (IOException e) {
                        // switched on us, read it from the reader
                    }
                }
            }
        }
        // no version, get the version from the index, we know that we refresh on flush
        Searcher searcher = searcher();
        try {
            UnicodeUtil.UTF8Result utf8 = Unicode.fromStringAsUtf8(get.uid().text());
            for (IndexReader reader : searcher.searcher().subReaders()) {
                BloomFilter filter = bloomCache.filter(reader, UidFieldMapper.NAME, asyncLoadBloomFilter);
                // we know that its not there...
                if (!filter.isPresent(utf8.result, 0, utf8.length)) {
                    continue;
                }
                UidField.DocIdAndVersion docIdAndVersion = UidField.loadDocIdAndVersion(reader, get.uid());
                if (docIdAndVersion != null && docIdAndVersion.docId != Lucene.NO_DOC) {
                    // found: the GetResult takes over the searcher
                    return new GetResult(searcher, docIdAndVersion);
                }
            }
        } catch (Exception e) {
            searcher.release();
            //TODO: A better exception goes here
            throw new EngineException(shardId(), "failed to load document", e);
        }
        searcher.release();
        return GetResult.NOT_EXISTS;
    } finally {
        rwl.readLock().unlock();
    }
}
@Override public void create(Create create) throws EngineException {
    rwl.readLock().lock();
    try {
        final IndexWriter currentWriter = this.indexWriter;
        if (currentWriter == null) {
            throw new EngineClosedException(shardId, failedEngine);
        }
        innerCreate(create, currentWriter);
        // signal refresh / merge-check / flush that there is work to do
        dirty = true;
        possibleMergeNeeded = true;
        flushNeeded = true;
    } catch (IOException e) {
        throw new CreateFailedEngineException(shardId, create, e);
    } catch (OutOfMemoryError e) {
        // an OOM may have left the writer inconsistent; fail the engine
        failEngine(e);
        throw new CreateFailedEngineException(shardId, create, e);
    } finally {
        rwl.readLock().unlock();
    }
}
// Applies a create (insert-only) operation under the per-uid dirty lock.
// Version sentinels used below: -1 = document not found, -2 = found but no version field.
// RECOVERY ops replay via updateDocument(s) since a transient translog may contain duplicates;
// all other origins resolve the current version (version map first, then index), run the
// conflict checks appropriate for their origin, and add the document.
private void innerCreate(Create create, IndexWriter writer) throws IOException {
    synchronized (dirtyLock(create.uid())) {
        UidField uidField = create.uidField();
        if (create.origin() == Operation.Origin.RECOVERY) {
            uidField.version(create.version());
            // we use update doc and not addDoc since we might get duplicates when using transient translog
            if (create.docs().size() > 1) {
                writer.updateDocuments(create.uid(), create.docs(), create.analyzer());
            } else {
                writer.updateDocument(create.uid(), create.docs().get(0), create.analyzer());
            }
            Translog.Location translogLocation = translog.add(new Translog.Create(create));
            // on recovery, we get the actual version we want to use
            if (create.version() != 0) {
                versionMap.put(create.uid().text(), new VersionValue(create.version(), false, threadPool.estimatedTimeInMillis(), translogLocation));
            }
        } else {
            long currentVersion;
            VersionValue versionValue = versionMap.get(create.uid().text());
            if (versionValue == null) {
                currentVersion = loadCurrentVersionFromIndex(create.uid());
            } else {
                // a delete older than the GC window counts as "not found"
                if (versionValue.delete() && (threadPool.estimatedTimeInMillis() - versionValue.time()) > gcDeletesInMillis) {
                    currentVersion = -1; // deleted, and GC
                } else {
                    currentVersion = versionValue.version();
                }
            }
            // same logic as index
            long updatedVersion;
            if (create.origin() == Operation.Origin.PRIMARY) {
                if (create.versionType() == VersionType.INTERNAL) { // internal version type
                    long expectedVersion = create.version();
                    if (expectedVersion != 0 && currentVersion != -2) { // -2 means we don't have a version, so ignore...
                        // an explicit version is provided, see if there is a conflict
                        // if the current version is -1, means we did not find anything, and
                        // a version is provided, so we do expect to find a doc under that version
                        // this is important, since we don't allow to preset a version in order to handle deletes
                        if (currentVersion == -1) {
                            throw new VersionConflictEngineException(shardId, create.type(), create.id(), -1, expectedVersion);
                        } else if (expectedVersion != currentVersion) {
                            throw new VersionConflictEngineException(shardId, create.type(), create.id(), currentVersion, expectedVersion);
                        }
                    }
                    updatedVersion = currentVersion < 0 ? 1 : currentVersion + 1;
                } else { // external version type
                    // an external version is provided, just check, if a local version exists, that its higher than it
                    // the actual version checking is one in an external system, and we just want to not index older versions
                    if (currentVersion >= 0) { // we can check!, its there
                        if (currentVersion >= create.version()) {
                            throw new VersionConflictEngineException(shardId, create.type(), create.id(), currentVersion, create.version());
                        }
                    }
                    updatedVersion = create.version();
                }
            } else { // if (index.origin() == Operation.Origin.REPLICA) {
                long expectedVersion = create.version();
                if (currentVersion != -2) { // -2 means we don't have a version, so ignore...
                    // if it does not exists, and its considered the first index operation (replicas are 1 of)
                    // then nothing to do
                    if (!(currentVersion == -1 && create.version() == 1)) {
                        // with replicas, we only check for previous version, we allow to set a future version
                        if (expectedVersion <= currentVersion) {
                            throw new VersionConflictEngineException(shardId, create.type(), create.id(), currentVersion, expectedVersion);
                        }
                    }
                }
                // replicas already hold the "future" version
                updatedVersion = create.version();
            }
            // if the doc does not exists or it exists but not delete
            if (versionValue != null) {
                if (!versionValue.delete()) {
                    throw new DocumentAlreadyExistsEngineException(shardId, create.type(), create.id());
                }
            } else if (currentVersion != -1) {
                // its not deleted, its already there
                throw new DocumentAlreadyExistsEngineException(shardId, create.type(), create.id());
            }
            uidField.version(updatedVersion);
            create.version(updatedVersion);
            if (create.docs().size() > 1) {
                writer.addDocuments(create.docs(), create.analyzer());
            } else {
                writer.addDocument(create.docs().get(0), create.analyzer());
            }
            Translog.Location translogLocation = translog.add(new Translog.Create(create));
            versionMap.put(create.uid().text(), new VersionValue(updatedVersion, false, threadPool.estimatedTimeInMillis(), translogLocation));
        }
    }
}
@Override public void index(Index index) throws EngineException {
    rwl.readLock().lock();
    try {
        final IndexWriter currentWriter = this.indexWriter;
        if (currentWriter == null) {
            throw new EngineClosedException(shardId, failedEngine);
        }
        innerIndex(index, currentWriter);
        // signal refresh / merge-check / flush that there is work to do
        dirty = true;
        possibleMergeNeeded = true;
        flushNeeded = true;
    } catch (IOException e) {
        throw new IndexFailedEngineException(shardId, index, e);
    } catch (OutOfMemoryError e) {
        // an OOM may have left the writer inconsistent; fail the engine
        failEngine(e);
        throw new IndexFailedEngineException(shardId, index, e);
    } finally {
        rwl.readLock().unlock();
    }
}
// Applies an index (insert-or-update) operation under the per-uid dirty lock.
// Version sentinels: -1 = document not found, -2 = found but no version field.
// Mirrors innerCreate's version resolution and conflict checks; the difference is that a
// not-found document is added (create fast path) while an existing one is updated in place.
private void innerIndex(Index index, IndexWriter writer) throws IOException {
    synchronized (dirtyLock(index.uid())) {
        UidField uidField = index.uidField();
        if (index.origin() == Operation.Origin.RECOVERY) {
            // replay path: updateDocument(s) tolerates duplicates from a transient translog
            uidField.version(index.version());
            if (index.docs().size() > 1) {
                writer.updateDocuments(index.uid(), index.docs(), index.analyzer());
            } else {
                writer.updateDocument(index.uid(), index.docs().get(0), index.analyzer());
            }
            Translog.Location translogLocation = translog.add(new Translog.Index(index));
            // on recovery, we get the actual version we want to use
            if (index.version() != 0) {
                versionMap.put(index.uid().text(), new VersionValue(index.version(), false, threadPool.estimatedTimeInMillis(), translogLocation));
            }
        } else {
            long currentVersion;
            VersionValue versionValue = versionMap.get(index.uid().text());
            if (versionValue == null) {
                currentVersion = loadCurrentVersionFromIndex(index.uid());
            } else {
                // a delete older than the GC window counts as "not found"
                if (versionValue.delete() && (threadPool.estimatedTimeInMillis() - versionValue.time()) > gcDeletesInMillis) {
                    currentVersion = -1; // deleted, and GC
                } else {
                    currentVersion = versionValue.version();
                }
            }
            long updatedVersion;
            if (index.origin() == Operation.Origin.PRIMARY) {
                if (index.versionType() == VersionType.INTERNAL) { // internal version type
                    long expectedVersion = index.version();
                    if (expectedVersion != 0 && currentVersion != -2) { // -2 means we don't have a version, so ignore...
                        // an explicit version is provided, see if there is a conflict
                        // if the current version is -1, means we did not find anything, and
                        // a version is provided, so we do expect to find a doc under that version
                        // this is important, since we don't allow to preset a version in order to handle deletes
                        if (currentVersion == -1) {
                            throw new VersionConflictEngineException(shardId, index.type(), index.id(), -1, expectedVersion);
                        } else if (expectedVersion != currentVersion) {
                            throw new VersionConflictEngineException(shardId, index.type(), index.id(), currentVersion, expectedVersion);
                        }
                    }
                    updatedVersion = currentVersion < 0 ? 1 : currentVersion + 1;
                } else { // external version type
                    // an external version is provided, just check, if a local version exists, that its higher than it
                    // the actual version checking is one in an external system, and we just want to not index older versions
                    if (currentVersion >= 0) { // we can check!, its there
                        if (currentVersion >= index.version()) {
                            throw new VersionConflictEngineException(shardId, index.type(), index.id(), currentVersion, index.version());
                        }
                    }
                    updatedVersion = index.version();
                }
            } else { // if (index.origin() == Operation.Origin.REPLICA) {
                long expectedVersion = index.version();
                if (currentVersion != -2) { // -2 means we don't have a version, so ignore...
                    // if it does not exists, and its considered the first index operation (replicas are 1 of)
                    // then nothing to do
                    if (!(currentVersion == -1 && index.version() == 1)) {
                        // with replicas, we only check for previous version, we allow to set a future version
                        if (expectedVersion <= currentVersion) {
                            throw new VersionConflictEngineException(shardId, index.type(), index.id(), currentVersion, expectedVersion);
                        }
                    }
                }
                // replicas already hold the "future" version
                updatedVersion = index.version();
            }
            uidField.version(updatedVersion);
            index.version(updatedVersion);
            if (currentVersion == -1) {
                // document does not exists, we can optimize for create
                if (index.docs().size() > 1) {
                    writer.addDocuments(index.docs(), index.analyzer());
                } else {
                    writer.addDocument(index.docs().get(0), index.analyzer());
                }
            } else {
                if (index.docs().size() > 1) {
                    writer.updateDocuments(index.uid(), index.docs(), index.analyzer());
                } else {
                    writer.updateDocument(index.uid(), index.docs().get(0), index.analyzer());
                }
            }
            Translog.Location translogLocation = translog.add(new Translog.Index(index));
            versionMap.put(index.uid().text(), new VersionValue(updatedVersion, false, threadPool.estimatedTimeInMillis(), translogLocation));
        }
    }
}
@Override public void delete(Delete delete) throws EngineException {
    rwl.readLock().lock();
    try {
        final IndexWriter currentWriter = this.indexWriter;
        if (currentWriter == null) {
            throw new EngineClosedException(shardId, failedEngine);
        }
        innerDelete(delete, currentWriter);
        // signal refresh / merge-check / flush that there is work to do
        dirty = true;
        possibleMergeNeeded = true;
        flushNeeded = true;
    } catch (IOException e) {
        throw new DeleteFailedEngineException(shardId, delete, e);
    } catch (OutOfMemoryError e) {
        // an OOM may have left the writer inconsistent; fail the engine
        failEngine(e);
        throw new DeleteFailedEngineException(shardId, delete, e);
    } finally {
        rwl.readLock().unlock();
    }
}
// Applies a delete operation under the per-uid dirty lock.
// Version sentinels: -1 = document not found, -2 = found but no version field.
// A delete of a missing doc (or a delete-on-delete) is recorded as notFound without touching
// the writer; otherwise the docs are deleted and a tombstone VersionValue is stored so that
// realtime gets and later writes observe the delete until it is GC'ed.
private void innerDelete(Delete delete, IndexWriter writer) throws IOException {
    synchronized (dirtyLock(delete.uid())) {
        if (delete.origin() == Operation.Origin.RECOVERY) {
            writer.deleteDocuments(delete.uid());
            Translog.Location translogLocation = translog.add(new Translog.Delete(delete));
            // update the version with the exact version from recovery, assuming we have it
            if (delete.version() != 0) {
                versionMap.put(delete.uid().text(), new VersionValue(delete.version(), true, threadPool.estimatedTimeInMillis(), translogLocation));
            }
        } else {
            long currentVersion;
            VersionValue versionValue = versionMap.get(delete.uid().text());
            if (versionValue == null) {
                currentVersion = loadCurrentVersionFromIndex(delete.uid());
            } else {
                // a delete older than the GC window counts as "not found"
                if (versionValue.delete() && (threadPool.estimatedTimeInMillis() - versionValue.time()) > gcDeletesInMillis) {
                    currentVersion = -1; // deleted, and GC
                } else {
                    currentVersion = versionValue.version();
                }
            }
            long updatedVersion;
            if (delete.origin() == Operation.Origin.PRIMARY) {
                if (delete.versionType() == VersionType.INTERNAL) { // internal version type
                    if (delete.version() != 0 && currentVersion != -2) { // -2 means we don't have a version, so ignore...
                        // an explicit version is provided, see if there is a conflict
                        // if the current version is -1, means we did not find anything, and
                        // a version is provided, so we do expect to find a doc under that version
                        if (currentVersion == -1) {
                            throw new VersionConflictEngineException(shardId, delete.type(), delete.id(), -1, delete.version());
                        } else if (delete.version() != currentVersion) {
                            throw new VersionConflictEngineException(shardId, delete.type(), delete.id(), currentVersion, delete.version());
                        }
                    }
                    updatedVersion = currentVersion < 0 ? 1 : currentVersion + 1;
                } else { // External
                    if (currentVersion == -1) {
                        throw new VersionConflictEngineException(shardId, delete.type(), delete.id(), -1, delete.version());
                    } else if (currentVersion >= delete.version()) {
                        throw new VersionConflictEngineException(shardId, delete.type(), delete.id(), currentVersion, delete.version());
                    }
                    updatedVersion = delete.version();
                }
            } else { // if (delete.origin() == Operation.Origin.REPLICA) {
                // on replica, the version is the future value expected (returned from the operation on the primary)
                if (currentVersion != -2) { // -2 means we don't have a version in the index, ignore
                    // only check if we have a version for it, otherwise, ignore (see later)
                    if (currentVersion != -1) {
                        // with replicas, we only check for previous version, we allow to set a future version
                        if (delete.version() <= currentVersion) {
                            // NOTE(review): siblings report currentVersion in the conflict; the
                            // "currentVersion - 1" here looks suspicious — confirm it is intended
                            throw new VersionConflictEngineException(shardId, delete.type(), delete.id(), currentVersion - 1, delete.version());
                        }
                    }
                }
                // replicas already hold the "future" version
                updatedVersion = delete.version();
            }
            if (currentVersion == -1) {
                // if the doc does not exists, just update with doc 0
                delete.version(0).notFound(true);
            } else if (versionValue != null && versionValue.delete()) {
                // if its a delete on delete and we have the current delete version, return it
                delete.version(versionValue.version()).notFound(true);
            } else {
                delete.version(updatedVersion);
                writer.deleteDocuments(delete.uid());
                Translog.Location translogLocation = translog.add(new Translog.Delete(delete));
                versionMap.put(delete.uid().text(), new VersionValue(updatedVersion, true, threadPool.estimatedTimeInMillis(), translogLocation));
            }
        }
    }
}
// Deletes all documents matching the query (optionally restricted by an alias filter),
// records the operation in the translog, and then refreshes the versioning table (which
// forces a refresh) so the in-memory version map does not mask the bulk delete.
// Consistency fixes vs siblings (create/index/delete): pass failedEngine to
// EngineClosedException, and fail the engine on OutOfMemoryError.
@Override public void delete(DeleteByQuery delete) throws EngineException {
    rwl.readLock().lock();
    try {
        IndexWriter writer = this.indexWriter;
        if (writer == null) {
            throw new EngineClosedException(shardId, failedEngine);
        }
        Query query;
        if (delete.aliasFilter() == null) {
            query = delete.query();
        } else {
            // restrict the delete to the documents visible through the alias
            query = new FilteredQuery(delete.query(), delete.aliasFilter());
        }
        writer.deleteDocuments(query);
        translog.add(new Translog.DeleteByQuery(delete));
        dirty = true;
        possibleMergeNeeded = true;
        flushNeeded = true;
    } catch (IOException e) {
        throw new DeleteByQueryFailedEngineException(shardId, delete, e);
    } catch (OutOfMemoryError e) {
        // same treatment as the other write paths: an OOM fails the engine
        failEngine(e);
        throw new DeleteByQueryFailedEngineException(shardId, delete, e);
    } finally {
        rwl.readLock().unlock();
    }
    //TODO: This is heavy, since we refresh, but we really have to...
    refreshVersioningTable(System.currentTimeMillis());
}
// Acquires the current NRT searcher. Acquisition can fail briefly while refresh/flush swaps
// the resource and marks the old holder for close, so spin (yielding) until it succeeds.
@Override public Searcher searcher() throws EngineException {
    AcquirableResource<ReaderSearcherHolder> holder = this.nrtResource;
    while (!holder.acquire()) {
        Thread.yield();
        holder = this.nrtResource;
    }
    return new RobinSearchResult(holder);
}
// True when writes happened since the last refresh (cleared under refreshMutex in refresh()).
@Override public boolean refreshNeeded() {
    return dirty;
}
// True when writes happened since the last merge check (cleared in maybeMerge()/optimize()).
@Override public boolean possibleMergeNeeded() {
    return this.possibleMergeNeeded;
}
// Reopens the NRT reader so recent writes become searchable. Holds the read lock so a full
// flush (which replaces the writer under the write lock) cannot run concurrently, and a
// mutex so two refreshes cannot race and leak a dangling reader.
@Override public void refresh(Refresh refresh) throws EngineException {
    if (indexWriter == null) {
        throw new EngineClosedException(shardId);
    }
    // we obtain a read lock here, since we don't want a flush to happen while we are refreshing
    // since it flushes the index as well (though, in terms of concurrency, we are allowed to do it)
    rwl.readLock().lock();
    try {
        // this engine always acts as if waitForOperations=true
        IndexWriter currentWriter = indexWriter;
        if (currentWriter == null) {
            throw new EngineClosedException(shardId, failedEngine);
        }
        try {
            // we need to obtain a mutex here, to make sure we don't leave dangling readers
            // we could have used an AtomicBoolean#compareAndSet, but, then we might miss refresh requests
            // compared to on going ones
            synchronized (refreshMutex) {
                if (dirty || refresh.force()) {
                    dirty = false;
                    AcquirableResource<ReaderSearcherHolder> current = nrtResource;
                    IndexReader newReader = current.resource().reader().reopen(true);
                    if (newReader != current.resource().reader()) {
                        // a new reader was actually opened: swap the holder, retire the old one
                        ExtendedIndexSearcher indexSearcher = new ExtendedIndexSearcher(newReader);
                        indexSearcher.setSimilarity(similarityService.defaultSearchSimilarity());
                        nrtResource = newAcquirableResource(new ReaderSearcherHolder(indexSearcher));
                        current.markForClose();
                    }
                }
            }
        } catch (AlreadyClosedException e) {
            // an index writer got replaced on us, ignore
        } catch (Exception e) {
            // triage: closed engine vs replaced writer vs genuine refresh failure
            if (indexWriter == null) {
                throw new EngineClosedException(shardId, failedEngine);
            } else if (currentWriter != indexWriter) {
                // an index writer got replaced on us, ignore
            } else {
                throw new RefreshFailedEngineException(shardId, e);
            }
        } catch (OutOfMemoryError e) {
            failEngine(e);
            throw new RefreshFailedEngineException(shardId, e);
        }
    } finally {
        rwl.readLock().unlock();
    }
}
@Override public void flush(Flush flush) throws EngineException {
if (indexWriter == null) {
throw new EngineClosedException(shardId, failedEngine);
}
// check outside the lock as well so we can check without blocking on the write lock
if (disableFlushCounter > 0) {
throw new FlushNotAllowedEngineException(shardId, "Recovery is in progress, flush is not allowed");
}
// don't allow for concurrent flush operations...
if (!flushing.compareAndSet(false, true)) {
throw new FlushNotAllowedEngineException(shardId, "Already flushing...");
}
try {
if (flush.full()) {
rwl.writeLock().lock();
try {
if (indexWriter == null) {
throw new EngineClosedException(shardId, failedEngine);
}
if (disableFlushCounter > 0) {
throw new FlushNotAllowedEngineException(shardId, "Recovery is in progress, flush is not allowed");
}
// disable refreshing, not dirty
dirty = false;
try {
// that's ok if the index writer failed and is in inconsistent state
// we will get an exception on a dirty operation, and will cause the shard
// to be allocated to a different node
indexWriter.close(false);
indexWriter = createWriter();
if (flushNeeded || flush.force()) {
flushNeeded = false;
long translogId = translogIdGenerator.incrementAndGet();
indexWriter.commit(MapBuilder.<String, String>newMapBuilder().put(Translog.TRANSLOG_ID_KEY, Long.toString(translogId)).map());
translog.newTranslog(translogId);
}
AcquirableResource<ReaderSearcherHolder> current = nrtResource;
nrtResource = buildNrtResource(indexWriter);
current.markForClose();
} catch (Exception e) {
throw new FlushFailedEngineException(shardId, e);
} catch (OutOfMemoryError e) {
failEngine(e);
throw new FlushFailedEngineException(shardId, e);
}
} finally {
rwl.writeLock().unlock();
}
} else {
rwl.readLock().lock();
try {
if (indexWriter == null) {
throw new EngineClosedException(shardId, failedEngine);
}
if (disableFlushCounter > 0) {
throw new FlushNotAllowedEngineException(shardId, "Recovery is in progress, flush is not allowed");
}
if (flushNeeded || flush.force()) {
flushNeeded = false;
try {
long translogId = translogIdGenerator.incrementAndGet();
translog.newTransientTranslog(translogId);
indexWriter.commit(MapBuilder.<String, String>newMapBuilder().put(Translog.TRANSLOG_ID_KEY, Long.toString(translogId)).map());
- translog.makeTransientCurrent();
+ if (flush.force()) {
+ // if we force, we might not have committed, we need to check that its the same id
+ Map<String, String> commitUserData = IndexReader.getCommitUserData(store.directory());
+ long committedTranslogId = Long.parseLong(commitUserData.get(Translog.TRANSLOG_ID_KEY));
+ if (committedTranslogId != translogId) {
+ // we did not commit anything, revert to the old translog
+ translog.revertTransient();
+ } else {
+ translog.makeTransientCurrent();
+ }
+ } else {
+ translog.makeTransientCurrent();
+ }
} catch (Exception e) {
translog.revertTransient();
throw new FlushFailedEngineException(shardId, e);
} catch (OutOfMemoryError e) {
translog.revertTransient();
failEngine(e);
throw new FlushFailedEngineException(shardId, e);
}
}
} finally {
rwl.readLock().unlock();
}
}
refreshVersioningTable(threadPool.estimatedTimeInMillis());
try {
SegmentInfos infos = new SegmentInfos();
infos.read(store.directory());
lastCommittedSegmentInfos = infos;
} catch (Exception e) {
logger.warn("failed to read latest segment infos on flush", e);
}
} finally {
flushing.set(false);
}
}
// Prunes the in-memory version map after forcing a refresh: entries older than 'time' are
// dropped (the index now reflects them), except delete tombstones which are kept until the
// GC window elapses so version checks keep seeing the delete.
// NOTE(review): callers pass different clocks — flush() uses threadPool.estimatedTimeInMillis()
// while delete(DeleteByQuery) uses System.currentTimeMillis(); confirm both are on the same
// timebase as VersionValue.time().
private void refreshVersioningTable(long time) {
    // we need to refresh in order to clear older version values
    refresh(new Refresh(true).force(true));
    for (Map.Entry<String, VersionValue> entry : versionMap.entrySet()) {
        String id = entry.getKey();
        synchronized (dirtyLock(id)) { // can we do it without this lock on each value? maybe batch to a set and get the lock once per set?
            VersionValue versionValue = versionMap.get(id);
            if (versionValue == null) {
                continue;
            }
            if (time - versionValue.time() <= 0) {
                continue; // its a newer value, from after/during we refreshed, don't clear it
            }
            if (versionValue.delete()) {
                if ((time - versionValue.time()) > gcDeletesInMillis) {
                    versionMap.remove(id);
                }
            } else {
                versionMap.remove(id);
            }
        }
    }
}
// Triggers a merge check on the writer if writes happened since the last one. The merge
// policy is temporarily enabled around the call (the EnableMergePolicy gate otherwise
// suppresses background merges).
@Override public void maybeMerge() throws EngineException {
    if (!possibleMergeNeeded) {
        return;
    }
    possibleMergeNeeded = false;
    rwl.readLock().lock();
    try {
        if (indexWriter == null) {
            throw new EngineClosedException(shardId, failedEngine);
        }
        if (indexWriter.getConfig().getMergePolicy() instanceof EnableMergePolicy) {
            ((EnableMergePolicy) indexWriter.getConfig().getMergePolicy()).enableMerge();
        }
        indexWriter.maybeMerge();
    } catch (Exception e) {
        throw new OptimizeFailedEngineException(shardId, e);
    } catch (OutOfMemoryError e) {
        failEngine(e);
        throw new OptimizeFailedEngineException(shardId, e);
    } finally {
        rwl.readLock().unlock();
        // re-read the field: the writer may have been swapped/nulled; disable merges again
        if (indexWriter != null && indexWriter.getConfig().getMergePolicy() instanceof EnableMergePolicy) {
            ((EnableMergePolicy) indexWriter.getConfig().getMergePolicy()).disableMerge();
        }
    }
}
// Force-merge entry point. Optionally flushes before and after, runs the requested merge
// (expunge deletes / maybeMerge / optimize to N segments) under the read lock with the merge
// policy gate enabled, then optionally waits for merges and refreshes. The optimizeMutex
// makes concurrent optimize calls no-ops rather than queueing.
@Override public void optimize(Optimize optimize) throws EngineException {
    if (optimize.flush()) {
        flush(new Flush().force(true));
    }
    if (optimizeMutex.compareAndSet(false, true)) {
        rwl.readLock().lock();
        try {
            if (indexWriter == null) {
                throw new EngineClosedException(shardId, failedEngine);
            }
            if (indexWriter.getConfig().getMergePolicy() instanceof EnableMergePolicy) {
                ((EnableMergePolicy) indexWriter.getConfig().getMergePolicy()).enableMerge();
            }
            if (optimize.onlyExpungeDeletes()) {
                indexWriter.expungeDeletes(false);
            } else if (optimize.maxNumSegments() <= 0) {
                indexWriter.maybeMerge();
                possibleMergeNeeded = false;
            } else {
                indexWriter.optimize(optimize.maxNumSegments(), false);
            }
        } catch (Exception e) {
            throw new OptimizeFailedEngineException(shardId, e);
        } catch (OutOfMemoryError e) {
            failEngine(e);
            throw new OptimizeFailedEngineException(shardId, e);
        } finally {
            rwl.readLock().unlock();
            // re-read the field: the writer may have been swapped/nulled; disable merges again
            if (indexWriter != null && indexWriter.getConfig().getMergePolicy() instanceof EnableMergePolicy) {
                ((EnableMergePolicy) indexWriter.getConfig().getMergePolicy()).disableMerge();
            }
            optimizeMutex.set(false);
        }
    }
    // wait for the merges outside of the read lock
    if (optimize.waitForMerge()) {
        indexWriter.waitForMerges();
    }
    if (optimize.flush()) {
        flush(new Flush().force(true));
    }
    if (optimize.refresh()) {
        refresh(new Refresh(false).force(true));
    }
}
// Takes a consistent pair of snapshots (index commit + translog) under the read lock so a
// concurrent flush cannot slip between the two, then invokes the handler outside the lock.
// Both snapshots are released once the handler returns (or throws).
@Override public <T> T snapshot(SnapshotHandler<T> snapshotHandler) throws EngineException {
    SnapshotIndexCommit indexCommit = null;
    Translog.Snapshot translogSnapshot = null;
    rwl.readLock().lock();
    try {
        indexCommit = deletionPolicy.snapshot();
        translogSnapshot = translog.snapshot();
    } catch (Exception e) {
        // the index commit may already be held; release it before propagating
        if (indexCommit != null) indexCommit.release();
        throw new SnapshotFailedEngineException(shardId, e);
    } finally {
        rwl.readLock().unlock();
    }
    try {
        return snapshotHandler.snapshot(indexCommit, translogSnapshot);
    } finally {
        indexCommit.release();
        translogSnapshot.release();
    }
}
// Runs the three-phase peer recovery protocol. Flushes are disabled for the whole duration
// (disableFlushCounter, incremented under the write lock so no flush is mid-commit). Phase 1
// streams an index-commit snapshot, phase 2 replays a translog snapshot, and phase 3 replays
// the translog delta under the write lock so the shard is quiesced while it finishes.
// NOTE(review): the error-path decrements of disableFlushCounter happen without holding the
// write lock, unlike the increment — confirm the field's visibility guarantees (volatile?)
// make this safe.
@Override public void recover(RecoveryHandler recoveryHandler) throws EngineException {
    // take a write lock here so it won't happen while a flush is in progress
    // this means that next commits will not be allowed once the lock is released
    rwl.writeLock().lock();
    try {
        disableFlushCounter++;
    } finally {
        rwl.writeLock().unlock();
    }
    SnapshotIndexCommit phase1Snapshot;
    try {
        phase1Snapshot = deletionPolicy.snapshot();
    } catch (Exception e) {
        --disableFlushCounter;
        throw new RecoveryEngineException(shardId, 1, "Snapshot failed", e);
    }
    try {
        recoveryHandler.phase1(phase1Snapshot);
    } catch (Exception e) {
        --disableFlushCounter;
        phase1Snapshot.release();
        throw new RecoveryEngineException(shardId, 1, "Execution failed", e);
    }
    Translog.Snapshot phase2Snapshot;
    try {
        phase2Snapshot = translog.snapshot();
    } catch (Exception e) {
        --disableFlushCounter;
        phase1Snapshot.release();
        throw new RecoveryEngineException(shardId, 2, "Snapshot failed", e);
    }
    try {
        recoveryHandler.phase2(phase2Snapshot);
    } catch (Exception e) {
        --disableFlushCounter;
        phase1Snapshot.release();
        phase2Snapshot.release();
        throw new RecoveryEngineException(shardId, 2, "Execution failed", e);
    }
    rwl.writeLock().lock();
    Translog.Snapshot phase3Snapshot = null;
    try {
        // phase 3: the delta accumulated since the phase 2 snapshot, replayed while writes are blocked
        phase3Snapshot = translog.snapshot(phase2Snapshot);
        recoveryHandler.phase3(phase3Snapshot);
    } catch (Exception e) {
        throw new RecoveryEngineException(shardId, 3, "Execution failed", e);
    } finally {
        --disableFlushCounter;
        rwl.writeLock().unlock();
        phase1Snapshot.release();
        phase2Snapshot.release();
        if (phase3Snapshot != null) {
            phase3Snapshot.release();
        }
    }
}
// Builds the per-segment statistics view: first the segments visible to search (from the
// current NRT searcher), then correlates/adds the ones present in the last commit, and
// returns them sorted by generation.
@Override public List<Segment> segments() {
    rwl.readLock().lock();
    try {
        IndexWriter indexWriter = this.indexWriter;
        if (indexWriter == null) {
            throw new EngineClosedException(shardId, failedEngine);
        }
        Map<String, Segment> segments = new HashMap<String, Segment>();
        // first, go over and compute the search ones...
        Searcher searcher = searcher();
        try {
            IndexReader[] readers = searcher.reader().getSequentialSubReaders();
            for (IndexReader reader : readers) {
                assert reader instanceof SegmentReader;
                SegmentInfo info = Lucene.getSegmentInfo((SegmentReader) reader);
                assert !segments.containsKey(info.name);
                Segment segment = new Segment(info.name);
                segment.search = true;
                segment.docCount = reader.numDocs();
                segment.delDocCount = reader.numDeletedDocs();
                try {
                    segment.sizeInBytes = info.sizeInBytes(true);
                } catch (IOException e) {
                    // size is best-effort; keep going without it
                    logger.trace("failed to get size for [{}]", e, info.name);
                }
                segments.put(info.name, segment);
            }
        } finally {
            searcher.release();
        }
        // now, correlate or add the committed ones...
        if (lastCommittedSegmentInfos != null) {
            SegmentInfos infos = lastCommittedSegmentInfos;
            for (SegmentInfo info : infos) {
                Segment segment = segments.get(info.name);
                if (segment == null) {
                    // committed but not (yet) visible to search
                    segment = new Segment(info.name);
                    segment.search = false;
                    segment.committed = true;
                    segment.docCount = info.docCount;
                    try {
                        segment.delDocCount = indexWriter.numDeletedDocs(info);
                    } catch (IOException e) {
                        logger.trace("failed to get deleted docs for committed segment", e);
                    }
                    try {
                        segment.sizeInBytes = info.sizeInBytes(true);
                    } catch (IOException e) {
                        logger.trace("failed to get size for [{}]", e, info.name);
                    }
                    segments.put(info.name, segment);
                } else {
                    segment.committed = true;
                }
            }
        }
        Segment[] segmentsArr = segments.values().toArray(new Segment[segments.values().size()]);
        Arrays.sort(segmentsArr, new Comparator<Segment>() {
            @Override public int compare(Segment o1, Segment o2) {
                return (int) (o1.generation() - o2.generation());
            }
        });
        return Arrays.asList(segmentsArr);
    } finally {
        rwl.readLock().unlock();
    }
}
// Closes the engine under the write lock so it cannot race with flush/recovery/writes.
@Override public void close() throws ElasticSearchException {
    rwl.writeLock().lock();
    try {
        innerClose();
    } finally {
        rwl.writeLock().unlock();
    }
}
// Marks the engine as failed exactly once: records the cause, notifies the registered
// listeners, and closes the engine. Subsequent calls are no-ops (failedEngine already set).
private void failEngine(Throwable failure) {
    synchronized (failedEngineMutex) {
        if (failedEngine != null) {
            return;
        }
        logger.warn("failed engine", failure);
        failedEngine = failure;
        for (FailedEngineListener listener : failedEngineListeners) {
            listener.onFailedEngine(shardId, failure);
        }
        innerClose();
    }
}
// Idempotent teardown: unregisters the settings listener, clears in-memory state, force-closes
// the NRT searcher resource and rolls back (not commits) the writer. Callers hold the write
// lock or the failedEngineMutex.
private void innerClose() {
    if (closed) {
        return;
    }
    indexSettingsService.removeListener(applySettings);
    closed = true;
    this.versionMap.clear();
    this.failedEngineListeners.clear();
    try {
        if (nrtResource != null) {
            this.nrtResource.forceClose();
        }
        // no need to commit in this case!, we snapshot before we close the shard, so translog and all sync'ed
        if (indexWriter != null) {
            try {
                indexWriter.rollback();
            } catch (AlreadyClosedException e) {
                // ignore
            }
        }
    } catch (Exception e) {
        logger.debug("failed to rollback writer on close", e);
    } finally {
        // always null the writer so later operations observe a closed engine
        indexWriter = null;
    }
}
// Maps a uid string to one of the striped per-uid locks. Math.abs(Integer.MIN_VALUE) is
// still negative, so that hash is routed to bucket 0 explicitly (same bucket the original
// "hash = 0" substitution produced).
private Object dirtyLock(String id) {
    int hash = id.hashCode();
    int bucket = (hash == Integer.MIN_VALUE) ? 0 : Math.abs(hash) % dirtyLocks.length;
    return dirtyLocks[bucket];
}
// Convenience overload: lock selection is based on the uid term's text form.
private Object dirtyLock(Term uid) {
    return dirtyLock(uid.text());
}
// Looks up the current version of a uid directly from the Lucene index, using per-segment
// bloom filters to skip segments that cannot contain it.
// Returns -1 when the document is not found; may return -2 ("present, but no version field")
// propagated from UidField.loadVersion.
private long loadCurrentVersionFromIndex(Term uid) {
    UnicodeUtil.UTF8Result utf8 = Unicode.fromStringAsUtf8(uid.text());
    Searcher searcher = searcher();
    try {
        for (IndexReader reader : searcher.searcher().subReaders()) {
            BloomFilter filter = bloomCache.filter(reader, UidFieldMapper.NAME, asyncLoadBloomFilter);
            // we know that its not there...
            if (!filter.isPresent(utf8.result, 0, utf8.length)) {
                continue;
            }
            long version = UidField.loadVersion(reader, uid);
            // either -2 (its there, but no version associated), or an actual version
            if (version != -1) {
                return version;
            }
        }
        return -1;
    } finally {
        searcher.release();
    }
}
// Creates the IndexWriter for this shard: clears any stale directory lock, chooses
// CREATE vs APPEND based on whether an index already exists, and wires in the engine's
// deletion policy, merge scheduler/policy, similarity and buffer/concurrency settings.
// On failure the partially-constructed writer is closed before the exception propagates.
private IndexWriter createWriter() throws IOException {
    IndexWriter indexWriter = null;
    try {
        // release locks when started
        if (IndexWriter.isLocked(store.directory())) {
            logger.warn("shard is locked, releasing lock");
            IndexWriter.unlock(store.directory());
        }
        boolean create = !IndexReader.indexExists(store.directory());
        IndexWriterConfig config = new IndexWriterConfig(Lucene.VERSION, analysisService.defaultIndexAnalyzer());
        config.setOpenMode(create ? IndexWriterConfig.OpenMode.CREATE : IndexWriterConfig.OpenMode.APPEND);
        config.setIndexDeletionPolicy(deletionPolicy);
        config.setMergeScheduler(mergeScheduler.newMergeScheduler());
        config.setMergePolicy(mergePolicyProvider.newMergePolicy());
        config.setSimilarity(similarityService.defaultIndexSimilarity());
        config.setRAMBufferSizeMB(indexingBufferSize.mbFrac());
        config.setTermIndexInterval(termIndexInterval);
        config.setReaderTermsIndexDivisor(termIndexDivisor);
        config.setMaxThreadStates(indexConcurrency);
        indexWriter = new IndexWriter(store.directory(), config);
    } catch (IOException e) {
        safeClose(indexWriter);
        throw e;
    }
    return indexWriter;
}
// Registers the index settings this engine can apply at runtime (see ApplySettings).
static {
IndexMetaData.addDynamicSettings(
"index.term_index_interval",
"index.term_index_divisor",
"index.index_concurrency",
"index.gc_deletes"
);
}
// Listener that applies dynamic index-settings changes to this engine. Settings that
// only take effect on a new writer/reader trigger a full flush at the end.
class ApplySettings implements IndexSettingsService.Listener {
@Override public void onRefreshSettings(Settings settings) {
// BUGFIX: read gc_deletes from the *updated* settings passed in, not from the
// engine's original indexSettings (which never changes); the other settings
// below already read from the parameter.
long gcDeletesInMillis = settings.getAsTime("index.gc_deletes", TimeValue.timeValueMillis(RobinEngine.this.gcDeletesInMillis)).millis();
if (gcDeletesInMillis != RobinEngine.this.gcDeletesInMillis) {
logger.info("updating index.gc_deletes from [{}] to [{}]", TimeValue.timeValueMillis(RobinEngine.this.gcDeletesInMillis), TimeValue.timeValueMillis(gcDeletesInMillis));
RobinEngine.this.gcDeletesInMillis = gcDeletesInMillis;
}
int termIndexInterval = settings.getAsInt("index.term_index_interval", RobinEngine.this.termIndexInterval);
int termIndexDivisor = settings.getAsInt("index.term_index_divisor", RobinEngine.this.termIndexDivisor); // IndexReader#DEFAULT_TERMS_INDEX_DIVISOR
int indexConcurrency = settings.getAsInt("index.index_concurrency", RobinEngine.this.indexConcurrency);
boolean requiresFlushing = false;
if (termIndexInterval != RobinEngine.this.termIndexInterval || termIndexDivisor != RobinEngine.this.termIndexDivisor) {
// read lock: keep the writer alive while we mutate its config
rwl.readLock().lock();
try {
if (termIndexInterval != RobinEngine.this.termIndexInterval) {
logger.info("updating index.term_index_interval from [{}] to [{}]", RobinEngine.this.termIndexInterval, termIndexInterval);
RobinEngine.this.termIndexInterval = termIndexInterval;
indexWriter.getConfig().setTermIndexInterval(termIndexInterval);
}
if (termIndexDivisor != RobinEngine.this.termIndexDivisor) {
logger.info("updating index.term_index_divisor from [{}] to [{}]", RobinEngine.this.termIndexDivisor, termIndexDivisor);
RobinEngine.this.termIndexDivisor = termIndexDivisor;
indexWriter.getConfig().setReaderTermsIndexDivisor(termIndexDivisor);
// we want to apply this right now for readers, even "current" ones
requiresFlushing = true;
}
if (indexConcurrency != RobinEngine.this.indexConcurrency) {
logger.info("updating index.index_concurrency from [{}] to [{}]", RobinEngine.this.indexConcurrency, indexConcurrency);
RobinEngine.this.indexConcurrency = indexConcurrency;
// we have to flush in this case, since it only applies on a new index writer
requiresFlushing = true;
}
} finally {
rwl.readLock().unlock();
}
if (requiresFlushing) {
// full flush re-creates the writer so the new config takes effect
flush(new Flush().full(true));
}
}
}
}
// Opens a near-real-time reader from the given writer, wraps it in a searcher with the
// engine's search similarity, and hands back an acquirable holder for safe sharing.
private AcquirableResource<ReaderSearcherHolder> buildNrtResource(IndexWriter indexWriter) throws IOException {
    IndexReader nrtReader = IndexReader.open(indexWriter, true);
    ExtendedIndexSearcher nrtSearcher = new ExtendedIndexSearcher(nrtReader);
    nrtSearcher.setSimilarity(similarityService.defaultSearchSimilarity());
    return newAcquirableResource(new ReaderSearcherHolder(nrtSearcher));
}
// Searcher handle backed by an acquired NRT resource; release() returns the
// acquisition so the underlying reader can eventually be closed.
private static class RobinSearchResult implements Searcher {
private final AcquirableResource<ReaderSearcherHolder> nrtHolder;
private RobinSearchResult(AcquirableResource<ReaderSearcherHolder> nrtHolder) {
this.nrtHolder = nrtHolder;
}
@Override public IndexReader reader() {
return nrtHolder.resource().reader();
}
@Override public ExtendedIndexSearcher searcher() {
return nrtHolder.resource().searcher();
}
@Override public boolean release() throws ElasticSearchException {
nrtHolder.release();
return true;
}
}
// Immutable entry of the in-memory version map: the last known version of a uid,
// whether that operation was a delete, when it happened (engine clock millis), and
// where the operation sits in the translog.
static class VersionValue {
    private final long version;
    private final boolean delete;
    private final long time;
    private final Translog.Location translogLocation;

    VersionValue(long version, boolean delete, long time, Translog.Location translogLocation) {
        this.version = version;
        this.delete = delete;
        this.time = time;
        this.translogLocation = translogLocation;
    }

    public long version() {
        return this.version;
    }

    public boolean delete() {
        return this.delete;
    }

    public long time() {
        return this.time;
    }

    public Translog.Location translogLocation() {
        return this.translogLocation;
    }
}
}
| true | true | private void innerCreate(Create create, IndexWriter writer) throws IOException {
synchronized (dirtyLock(create.uid())) {
UidField uidField = create.uidField();
if (create.origin() == Operation.Origin.RECOVERY) {
uidField.version(create.version());
// we use update doc and not addDoc since we might get duplicates when using transient translog
if (create.docs().size() > 1) {
writer.updateDocuments(create.uid(), create.docs(), create.analyzer());
} else {
writer.updateDocument(create.uid(), create.docs().get(0), create.analyzer());
}
Translog.Location translogLocation = translog.add(new Translog.Create(create));
// on recovery, we get the actual version we want to use
if (create.version() != 0) {
versionMap.put(create.uid().text(), new VersionValue(create.version(), false, threadPool.estimatedTimeInMillis(), translogLocation));
}
} else {
long currentVersion;
VersionValue versionValue = versionMap.get(create.uid().text());
if (versionValue == null) {
currentVersion = loadCurrentVersionFromIndex(create.uid());
} else {
if (versionValue.delete() && (threadPool.estimatedTimeInMillis() - versionValue.time()) > gcDeletesInMillis) {
currentVersion = -1; // deleted, and GC
} else {
currentVersion = versionValue.version();
}
}
// same logic as index
long updatedVersion;
if (create.origin() == Operation.Origin.PRIMARY) {
if (create.versionType() == VersionType.INTERNAL) { // internal version type
long expectedVersion = create.version();
if (expectedVersion != 0 && currentVersion != -2) { // -2 means we don't have a version, so ignore...
// an explicit version is provided, see if there is a conflict
// if the current version is -1, means we did not find anything, and
// a version is provided, so we do expect to find a doc under that version
// this is important, since we don't allow to preset a version in order to handle deletes
if (currentVersion == -1) {
throw new VersionConflictEngineException(shardId, create.type(), create.id(), -1, expectedVersion);
} else if (expectedVersion != currentVersion) {
throw new VersionConflictEngineException(shardId, create.type(), create.id(), currentVersion, expectedVersion);
}
}
updatedVersion = currentVersion < 0 ? 1 : currentVersion + 1;
} else { // external version type
// an external version is provided, just check, if a local version exists, that its higher than it
// the actual version checking is one in an external system, and we just want to not index older versions
if (currentVersion >= 0) { // we can check!, its there
if (currentVersion >= create.version()) {
throw new VersionConflictEngineException(shardId, create.type(), create.id(), currentVersion, create.version());
}
}
updatedVersion = create.version();
}
} else { // if (index.origin() == Operation.Origin.REPLICA) {
long expectedVersion = create.version();
if (currentVersion != -2) { // -2 means we don't have a version, so ignore...
// if it does not exists, and its considered the first index operation (replicas are 1 of)
// then nothing to do
if (!(currentVersion == -1 && create.version() == 1)) {
// with replicas, we only check for previous version, we allow to set a future version
if (expectedVersion <= currentVersion) {
throw new VersionConflictEngineException(shardId, create.type(), create.id(), currentVersion, expectedVersion);
}
}
}
// replicas already hold the "future" version
updatedVersion = create.version();
}
// if the doc does not exists or it exists but not delete
if (versionValue != null) {
if (!versionValue.delete()) {
throw new DocumentAlreadyExistsEngineException(shardId, create.type(), create.id());
}
} else if (currentVersion != -1) {
// its not deleted, its already there
throw new DocumentAlreadyExistsEngineException(shardId, create.type(), create.id());
}
uidField.version(updatedVersion);
create.version(updatedVersion);
if (create.docs().size() > 1) {
writer.addDocuments(create.docs(), create.analyzer());
} else {
writer.addDocument(create.docs().get(0), create.analyzer());
}
Translog.Location translogLocation = translog.add(new Translog.Create(create));
versionMap.put(create.uid().text(), new VersionValue(updatedVersion, false, threadPool.estimatedTimeInMillis(), translogLocation));
}
}
}
// Indexes a document. Takes the read lock (many writers may index concurrently;
// flush/close take the write lock), then delegates to innerIndex which holds the
// per-uid dirty lock. An OutOfMemoryError fails the whole engine.
@Override public void index(Index index) throws EngineException {
rwl.readLock().lock();
try {
IndexWriter writer = this.indexWriter;
if (writer == null) {
throw new EngineClosedException(shardId, failedEngine);
}
innerIndex(index, writer);
// mark state so refresh/merge/flush know there is new work
dirty = true;
possibleMergeNeeded = true;
flushNeeded = true;
} catch (IOException e) {
throw new IndexFailedEngineException(shardId, index, e);
} catch (OutOfMemoryError e) {
failEngine(e);
throw new IndexFailedEngineException(shardId, index, e);
} finally {
rwl.readLock().unlock();
}
}
// Applies one index (create-or-replace) operation under the per-uid dirty lock,
// enforcing optimistic-concurrency version checks before writing to Lucene and the
// translog. Version sentinel values: -1 = not found, -2 = found but no version stored.
private void innerIndex(Index index, IndexWriter writer) throws IOException {
synchronized (dirtyLock(index.uid())) {
UidField uidField = index.uidField();
if (index.origin() == Operation.Origin.RECOVERY) {
// recovery replays operations verbatim: no version checks, update to dedupe
uidField.version(index.version());
if (index.docs().size() > 1) {
writer.updateDocuments(index.uid(), index.docs(), index.analyzer());
} else {
writer.updateDocument(index.uid(), index.docs().get(0), index.analyzer());
}
Translog.Location translogLocation = translog.add(new Translog.Index(index));
// on recovery, we get the actual version we want to use
if (index.version() != 0) {
versionMap.put(index.uid().text(), new VersionValue(index.version(), false, threadPool.estimatedTimeInMillis(), translogLocation));
}
} else {
// resolve the current version: version map first, falling back to the index
long currentVersion;
VersionValue versionValue = versionMap.get(index.uid().text());
if (versionValue == null) {
currentVersion = loadCurrentVersionFromIndex(index.uid());
} else {
// a delete tombstone older than gc_deletes is treated as "not found"
if (versionValue.delete() && (threadPool.estimatedTimeInMillis() - versionValue.time()) > gcDeletesInMillis) {
currentVersion = -1; // deleted, and GC
} else {
currentVersion = versionValue.version();
}
}
long updatedVersion;
if (index.origin() == Operation.Origin.PRIMARY) {
if (index.versionType() == VersionType.INTERNAL) { // internal version type
long expectedVersion = index.version();
if (expectedVersion != 0 && currentVersion != -2) { // -2 means we don't have a version, so ignore...
// an explicit version is provided, see if there is a conflict
// if the current version is -1, means we did not find anything, and
// a version is provided, so we do expect to find a doc under that version
// this is important, since we don't allow to preset a version in order to handle deletes
if (currentVersion == -1) {
throw new VersionConflictEngineException(shardId, index.type(), index.id(), -1, expectedVersion);
} else if (expectedVersion != currentVersion) {
throw new VersionConflictEngineException(shardId, index.type(), index.id(), currentVersion, expectedVersion);
}
}
updatedVersion = currentVersion < 0 ? 1 : currentVersion + 1;
} else { // external version type
// an external version is provided, just check, if a local version exists, that its higher than it
// the actual version checking is one in an external system, and we just want to not index older versions
if (currentVersion >= 0) { // we can check!, its there
if (currentVersion >= index.version()) {
throw new VersionConflictEngineException(shardId, index.type(), index.id(), currentVersion, index.version());
}
}
updatedVersion = index.version();
}
} else { // if (index.origin() == Operation.Origin.REPLICA) {
long expectedVersion = index.version();
if (currentVersion != -2) { // -2 means we don't have a version, so ignore...
// if it does not exists, and its considered the first index operation (replicas are 1 of)
// then nothing to do
if (!(currentVersion == -1 && index.version() == 1)) {
// with replicas, we only check for previous version, we allow to set a future version
if (expectedVersion <= currentVersion) {
throw new VersionConflictEngineException(shardId, index.type(), index.id(), currentVersion, expectedVersion);
}
}
}
// replicas already hold the "future" version
updatedVersion = index.version();
}
uidField.version(updatedVersion);
index.version(updatedVersion);
if (currentVersion == -1) {
// document does not exists, we can optimize for create
if (index.docs().size() > 1) {
writer.addDocuments(index.docs(), index.analyzer());
} else {
writer.addDocument(index.docs().get(0), index.analyzer());
}
} else {
if (index.docs().size() > 1) {
writer.updateDocuments(index.uid(), index.docs(), index.analyzer());
} else {
writer.updateDocument(index.uid(), index.docs().get(0), index.analyzer());
}
}
// record in the translog, then publish the new version to the version map
Translog.Location translogLocation = translog.add(new Translog.Index(index));
versionMap.put(index.uid().text(), new VersionValue(updatedVersion, false, threadPool.estimatedTimeInMillis(), translogLocation));
}
}
}
// Deletes a document by uid. Mirrors index(Index): read lock, delegate to innerDelete
// under the per-uid dirty lock, fail the engine on OutOfMemoryError.
@Override public void delete(Delete delete) throws EngineException {
rwl.readLock().lock();
try {
IndexWriter writer = this.indexWriter;
if (writer == null) {
throw new EngineClosedException(shardId, failedEngine);
}
innerDelete(delete, writer);
// mark state so refresh/merge/flush know there is new work
dirty = true;
possibleMergeNeeded = true;
flushNeeded = true;
} catch (IOException e) {
throw new DeleteFailedEngineException(shardId, delete, e);
} catch (OutOfMemoryError e) {
failEngine(e);
throw new DeleteFailedEngineException(shardId, delete, e);
} finally {
rwl.readLock().unlock();
}
}
// Applies one delete operation under the per-uid dirty lock, with the same
// version-conflict rules as innerIndex. Successful deletes leave a tombstone entry
// (delete=true) in the version map; refreshVersioningTable GCs it after gc_deletes.
private void innerDelete(Delete delete, IndexWriter writer) throws IOException {
synchronized (dirtyLock(delete.uid())) {
if (delete.origin() == Operation.Origin.RECOVERY) {
// recovery replays verbatim: no version checks
writer.deleteDocuments(delete.uid());
Translog.Location translogLocation = translog.add(new Translog.Delete(delete));
// update the version with the exact version from recovery, assuming we have it
if (delete.version() != 0) {
versionMap.put(delete.uid().text(), new VersionValue(delete.version(), true, threadPool.estimatedTimeInMillis(), translogLocation));
}
} else {
// resolve the current version: version map first, then the index (-1/-2 sentinels)
long currentVersion;
VersionValue versionValue = versionMap.get(delete.uid().text());
if (versionValue == null) {
currentVersion = loadCurrentVersionFromIndex(delete.uid());
} else {
if (versionValue.delete() && (threadPool.estimatedTimeInMillis() - versionValue.time()) > gcDeletesInMillis) {
currentVersion = -1; // deleted, and GC
} else {
currentVersion = versionValue.version();
}
}
long updatedVersion;
if (delete.origin() == Operation.Origin.PRIMARY) {
if (delete.versionType() == VersionType.INTERNAL) { // internal version type
if (delete.version() != 0 && currentVersion != -2) { // -2 means we don't have a version, so ignore...
// an explicit version is provided, see if there is a conflict
// if the current version is -1, means we did not find anything, and
// a version is provided, so we do expect to find a doc under that version
if (currentVersion == -1) {
throw new VersionConflictEngineException(shardId, delete.type(), delete.id(), -1, delete.version());
} else if (delete.version() != currentVersion) {
throw new VersionConflictEngineException(shardId, delete.type(), delete.id(), currentVersion, delete.version());
}
}
updatedVersion = currentVersion < 0 ? 1 : currentVersion + 1;
} else { // External
if (currentVersion == -1) {
throw new VersionConflictEngineException(shardId, delete.type(), delete.id(), -1, delete.version());
} else if (currentVersion >= delete.version()) {
throw new VersionConflictEngineException(shardId, delete.type(), delete.id(), currentVersion, delete.version());
}
updatedVersion = delete.version();
}
} else { // if (delete.origin() == Operation.Origin.REPLICA) {
// on replica, the version is the future value expected (returned from the operation on the primary)
if (currentVersion != -2) { // -2 means we don't have a version in the index, ignore
// only check if we have a version for it, otherwise, ignore (see later)
if (currentVersion != -1) {
// with replicas, we only check for previous version, we allow to set a future version
if (delete.version() <= currentVersion) {
throw new VersionConflictEngineException(shardId, delete.type(), delete.id(), currentVersion - 1, delete.version());
}
}
}
// replicas already hold the "future" version
updatedVersion = delete.version();
}
if (currentVersion == -1) {
// if the doc does not exists, just update with doc 0
delete.version(0).notFound(true);
} else if (versionValue != null && versionValue.delete()) {
// if its a delete on delete and we have the current delete version, return it
delete.version(versionValue.version()).notFound(true);
} else {
// real delete: remove from Lucene, log it, and publish the tombstone
delete.version(updatedVersion);
writer.deleteDocuments(delete.uid());
Translog.Location translogLocation = translog.add(new Translog.Delete(delete));
versionMap.put(delete.uid().text(), new VersionValue(updatedVersion, true, threadPool.estimatedTimeInMillis(), translogLocation));
}
}
}
}
// Deletes all documents matching a query (optionally AND'ed with an alias filter).
// Bypasses per-uid version bookkeeping, hence the mandatory (heavy) version-table
// refresh at the end.
@Override public void delete(DeleteByQuery delete) throws EngineException {
rwl.readLock().lock();
try {
IndexWriter writer = this.indexWriter;
if (writer == null) {
// CONSISTENCY FIX: include the failure cause like index()/delete(Delete) do
throw new EngineClosedException(shardId, failedEngine);
}
Query query;
if (delete.aliasFilter() == null) {
query = delete.query();
} else {
query = new FilteredQuery(delete.query(), delete.aliasFilter());
}
writer.deleteDocuments(query);
translog.add(new Translog.DeleteByQuery(delete));
dirty = true;
possibleMergeNeeded = true;
flushNeeded = true;
} catch (IOException e) {
throw new DeleteByQueryFailedEngineException(shardId, delete, e);
} catch (OutOfMemoryError e) {
// CONSISTENCY FIX: fail the engine on OOM, like every other write path
failEngine(e);
throw new DeleteByQueryFailedEngineException(shardId, delete, e);
} finally {
rwl.readLock().unlock();
}
//TODO: This is heavy, since we refresh, but we really have to...
// CLOCK FIX: version-map timestamps are taken from threadPool.estimatedTimeInMillis()
// (see innerIndex/innerDelete and flush()); using System.currentTimeMillis() here mixed
// two clock bases in refreshVersioningTable's age comparison.
refreshVersioningTable(threadPool.estimatedTimeInMillis());
}
// Acquires the current NRT searcher. If the holder is being swapped out by a
// concurrent refresh/flush (acquire fails), yield and retry against the new one.
@Override public Searcher searcher() throws EngineException {
    while (true) {
        AcquirableResource<ReaderSearcherHolder> holder = this.nrtResource;
        if (holder.acquire()) {
            // pinned successfully; the caller must release() the returned Searcher
            return new RobinSearchResult(holder);
        }
        Thread.yield();
    }
}
// True when writes have happened since the last refresh (set by index/delete paths).
@Override public boolean refreshNeeded() {
return dirty;
}
// True when writes since the last maybeMerge() may have produced mergeable segments.
@Override public boolean possibleMergeNeeded() {
return this.possibleMergeNeeded;
}
// Refreshes the NRT reader so recent writes become searchable. Runs under the read
// lock (so a concurrent flush, which takes the write lock, cannot swap the writer
// mid-refresh) and under refreshMutex so only one reader swap happens at a time.
@Override public void refresh(Refresh refresh) throws EngineException {
if (indexWriter == null) {
throw new EngineClosedException(shardId);
}
// we obtain a read lock here, since we don't want a flush to happen while we are refreshing
// since it flushes the index as well (though, in terms of concurrency, we are allowed to do it)
rwl.readLock().lock();
try {
// this engine always acts as if waitForOperations=true
IndexWriter currentWriter = indexWriter;
if (currentWriter == null) {
throw new EngineClosedException(shardId, failedEngine);
}
try {
// we need to obtain a mutex here, to make sure we don't leave dangling readers
// we could have used an AtomicBoolean#compareAndSet, but, then we might miss refresh requests
// compared to on going ones
synchronized (refreshMutex) {
if (dirty || refresh.force()) {
dirty = false;
AcquirableResource<ReaderSearcherHolder> current = nrtResource;
IndexReader newReader = current.resource().reader().reopen(true);
// reopen returns the same instance when nothing changed; only swap on a new reader
if (newReader != current.resource().reader()) {
ExtendedIndexSearcher indexSearcher = new ExtendedIndexSearcher(newReader);
indexSearcher.setSimilarity(similarityService.defaultSearchSimilarity());
nrtResource = newAcquirableResource(new ReaderSearcherHolder(indexSearcher));
// old holder is closed once all in-flight acquisitions release it
current.markForClose();
}
}
}
} catch (AlreadyClosedException e) {
// an index writer got replaced on us, ignore
} catch (Exception e) {
if (indexWriter == null) {
throw new EngineClosedException(shardId, failedEngine);
} else if (currentWriter != indexWriter) {
// an index writer got replaced on us, ignore
} else {
throw new RefreshFailedEngineException(shardId, e);
}
} catch (OutOfMemoryError e) {
failEngine(e);
throw new RefreshFailedEngineException(shardId, e);
}
} finally {
rwl.readLock().unlock();
}
}
// Flushes the engine: commits Lucene and rolls the translog to a new id. A "full"
// flush additionally closes and re-creates the IndexWriter (under the write lock);
// a normal flush commits in place under the read lock. Disallowed while recovery is
// in progress (disableFlushCounter > 0) and while another flush runs (flushing CAS).
@Override public void flush(Flush flush) throws EngineException {
if (indexWriter == null) {
throw new EngineClosedException(shardId, failedEngine);
}
// check outside the lock as well so we can check without blocking on the write lock
if (disableFlushCounter > 0) {
throw new FlushNotAllowedEngineException(shardId, "Recovery is in progress, flush is not allowed");
}
// don't allow for concurrent flush operations...
if (!flushing.compareAndSet(false, true)) {
throw new FlushNotAllowedEngineException(shardId, "Already flushing...");
}
try {
if (flush.full()) {
rwl.writeLock().lock();
try {
// re-check under the lock: state may have changed while we waited
if (indexWriter == null) {
throw new EngineClosedException(shardId, failedEngine);
}
if (disableFlushCounter > 0) {
throw new FlushNotAllowedEngineException(shardId, "Recovery is in progress, flush is not allowed");
}
// disable refreshing, not dirty
dirty = false;
try {
// that's ok if the index writer failed and is in inconsistent state
// we will get an exception on a dirty operation, and will cause the shard
// to be allocated to a different node
indexWriter.close(false);
indexWriter = createWriter();
if (flushNeeded || flush.force()) {
flushNeeded = false;
// commit with the new translog id recorded in the commit user data
long translogId = translogIdGenerator.incrementAndGet();
indexWriter.commit(MapBuilder.<String, String>newMapBuilder().put(Translog.TRANSLOG_ID_KEY, Long.toString(translogId)).map());
translog.newTranslog(translogId);
}
// swap the NRT resource to one backed by the new writer
AcquirableResource<ReaderSearcherHolder> current = nrtResource;
nrtResource = buildNrtResource(indexWriter);
current.markForClose();
} catch (Exception e) {
throw new FlushFailedEngineException(shardId, e);
} catch (OutOfMemoryError e) {
failEngine(e);
throw new FlushFailedEngineException(shardId, e);
}
} finally {
rwl.writeLock().unlock();
}
} else {
rwl.readLock().lock();
try {
if (indexWriter == null) {
throw new EngineClosedException(shardId, failedEngine);
}
if (disableFlushCounter > 0) {
throw new FlushNotAllowedEngineException(shardId, "Recovery is in progress, flush is not allowed");
}
if (flushNeeded || flush.force()) {
flushNeeded = false;
try {
// stage a transient translog, commit, then promote it; revert on failure
long translogId = translogIdGenerator.incrementAndGet();
translog.newTransientTranslog(translogId);
indexWriter.commit(MapBuilder.<String, String>newMapBuilder().put(Translog.TRANSLOG_ID_KEY, Long.toString(translogId)).map());
translog.makeTransientCurrent();
} catch (Exception e) {
translog.revertTransient();
throw new FlushFailedEngineException(shardId, e);
} catch (OutOfMemoryError e) {
translog.revertTransient();
failEngine(e);
throw new FlushFailedEngineException(shardId, e);
}
}
} finally {
rwl.readLock().unlock();
}
}
// drop version-map entries made obsolete by the commit
refreshVersioningTable(threadPool.estimatedTimeInMillis());
try {
// remember the committed segments for segments() reporting
SegmentInfos infos = new SegmentInfos();
infos.read(store.directory());
lastCommittedSegmentInfos = infos;
} catch (Exception e) {
logger.warn("failed to read latest segment infos on flush", e);
}
} finally {
flushing.set(false);
}
}
// Prunes the in-memory version map after a refresh: entries older than 'time' are
// removed (their versions are now readable from the index); delete tombstones are
// kept until gc_deletes has elapsed so replayed/late ops still see the delete.
private void refreshVersioningTable(long time) {
// we need to refresh in order to clear older version values
refresh(new Refresh(true).force(true));
for (Map.Entry<String, VersionValue> entry : versionMap.entrySet()) {
String id = entry.getKey();
synchronized (dirtyLock(id)) { // can we do it without this lock on each value? maybe batch to a set and get the lock once per set?
// re-read under the lock; a concurrent write may have replaced/removed it
VersionValue versionValue = versionMap.get(id);
if (versionValue == null) {
continue;
}
if (time - versionValue.time() <= 0) {
continue; // its a newer value, from after/during we refreshed, don't clear it
}
if (versionValue.delete()) {
if ((time - versionValue.time()) > gcDeletesInMillis) {
versionMap.remove(id);
}
} else {
versionMap.remove(id);
}
}
}
}
// Triggers a background merge check if writes may have produced mergeable segments.
// Temporarily enables the merge policy (when it is an EnableMergePolicy) around the
// maybeMerge call, and disables it again in the finally block.
@Override public void maybeMerge() throws EngineException {
if (!possibleMergeNeeded) {
return;
}
possibleMergeNeeded = false;
rwl.readLock().lock();
try {
if (indexWriter == null) {
throw new EngineClosedException(shardId, failedEngine);
}
if (indexWriter.getConfig().getMergePolicy() instanceof EnableMergePolicy) {
((EnableMergePolicy) indexWriter.getConfig().getMergePolicy()).enableMerge();
}
indexWriter.maybeMerge();
} catch (Exception e) {
throw new OptimizeFailedEngineException(shardId, e);
} catch (OutOfMemoryError e) {
failEngine(e);
throw new OptimizeFailedEngineException(shardId, e);
} finally {
rwl.readLock().unlock();
// NOTE(review): this re-reads the indexWriter field after unlocking; a concurrent
// full flush may have swapped it — confirm this race is acceptable
if (indexWriter != null && indexWriter.getConfig().getMergePolicy() instanceof EnableMergePolicy) {
((EnableMergePolicy) indexWriter.getConfig().getMergePolicy()).disableMerge();
}
}
}
// Force-merge entry point. Optionally flushes before and after, expunges deletes or
// optimizes down to maxNumSegments, and optionally waits for merges / refreshes.
// optimizeMutex ensures only one optimize runs at a time; a losing caller skips the
// merge work but still honors waitForMerge/flush/refresh below.
@Override public void optimize(Optimize optimize) throws EngineException {
if (optimize.flush()) {
flush(new Flush().force(true));
}
if (optimizeMutex.compareAndSet(false, true)) {
rwl.readLock().lock();
try {
if (indexWriter == null) {
throw new EngineClosedException(shardId, failedEngine);
}
// EnableMergePolicy gates background merges; open the gate for the duration
if (indexWriter.getConfig().getMergePolicy() instanceof EnableMergePolicy) {
((EnableMergePolicy) indexWriter.getConfig().getMergePolicy()).enableMerge();
}
if (optimize.onlyExpungeDeletes()) {
indexWriter.expungeDeletes(false);
} else if (optimize.maxNumSegments() <= 0) {
indexWriter.maybeMerge();
possibleMergeNeeded = false;
} else {
indexWriter.optimize(optimize.maxNumSegments(), false);
}
} catch (Exception e) {
throw new OptimizeFailedEngineException(shardId, e);
} catch (OutOfMemoryError e) {
failEngine(e);
throw new OptimizeFailedEngineException(shardId, e);
} finally {
rwl.readLock().unlock();
if (indexWriter != null && indexWriter.getConfig().getMergePolicy() instanceof EnableMergePolicy) {
((EnableMergePolicy) indexWriter.getConfig().getMergePolicy()).disableMerge();
}
optimizeMutex.set(false);
}
}
// wait for the merges outside of the read lock
// NOTE(review): indexWriter is dereferenced here without a null check; if the engine
// closed concurrently this can NPE — confirm callers guard against that
if (optimize.waitForMerge()) {
indexWriter.waitForMerges();
}
if (optimize.flush()) {
flush(new Flush().force(true));
}
if (optimize.refresh()) {
refresh(new Refresh(false).force(true));
}
}
// Takes a coherent snapshot (last index commit + translog) under the read lock,
// then hands both to the handler outside the lock, releasing them afterwards.
@Override public <T> T snapshot(SnapshotHandler<T> snapshotHandler) throws EngineException {
    SnapshotIndexCommit snapshotIndexCommit = null;
    Translog.Snapshot translogSnapshot = null;
    rwl.readLock().lock();
    try {
        snapshotIndexCommit = deletionPolicy.snapshot();
        translogSnapshot = translog.snapshot();
    } catch (Exception e) {
        // the index commit may have been taken before the translog snapshot failed
        if (snapshotIndexCommit != null) snapshotIndexCommit.release();
        throw new SnapshotFailedEngineException(shardId, e);
    } finally {
        rwl.readLock().unlock();
    }
    try {
        return snapshotHandler.snapshot(snapshotIndexCommit, translogSnapshot);
    } finally {
        snapshotIndexCommit.release();
        translogSnapshot.release();
    }
}
// Drives a three-phase peer recovery: phase1 streams the index commit, phase2 streams
// the translog accumulated meanwhile, phase3 (under the write lock, so no new writes)
// streams the final translog delta. Flushes are disabled for the whole duration via
// disableFlushCounter, and every error path decrements it before rethrowing.
@Override public void recover(RecoveryHandler recoveryHandler) throws EngineException {
// take a write lock here so it won't happen while a flush is in progress
// this means that next commits will not be allowed once the lock is released
rwl.writeLock().lock();
try {
disableFlushCounter++;
} finally {
rwl.writeLock().unlock();
}
SnapshotIndexCommit phase1Snapshot;
try {
phase1Snapshot = deletionPolicy.snapshot();
} catch (Exception e) {
--disableFlushCounter;
throw new RecoveryEngineException(shardId, 1, "Snapshot failed", e);
}
try {
recoveryHandler.phase1(phase1Snapshot);
} catch (Exception e) {
--disableFlushCounter;
phase1Snapshot.release();
throw new RecoveryEngineException(shardId, 1, "Execution failed", e);
}
Translog.Snapshot phase2Snapshot;
try {
phase2Snapshot = translog.snapshot();
} catch (Exception e) {
--disableFlushCounter;
phase1Snapshot.release();
throw new RecoveryEngineException(shardId, 2, "Snapshot failed", e);
}
try {
recoveryHandler.phase2(phase2Snapshot);
} catch (Exception e) {
--disableFlushCounter;
phase1Snapshot.release();
phase2Snapshot.release();
throw new RecoveryEngineException(shardId, 2, "Execution failed", e);
}
// phase3 runs under the write lock: no concurrent writes, so the delta is final
rwl.writeLock().lock();
Translog.Snapshot phase3Snapshot = null;
try {
// snapshot only the operations added since phase2's snapshot
phase3Snapshot = translog.snapshot(phase2Snapshot);
recoveryHandler.phase3(phase3Snapshot);
} catch (Exception e) {
throw new RecoveryEngineException(shardId, 3, "Execution failed", e);
} finally {
--disableFlushCounter;
rwl.writeLock().unlock();
phase1Snapshot.release();
phase2Snapshot.release();
if (phase3Snapshot != null) {
phase3Snapshot.release();
}
}
}
// Reports the shard's segments: first the ones visible to search (from the current
// NRT reader), then correlates/adds the ones in the last commit, and returns them
// sorted by generation.
@Override public List<Segment> segments() {
rwl.readLock().lock();
try {
IndexWriter indexWriter = this.indexWriter;
if (indexWriter == null) {
throw new EngineClosedException(shardId, failedEngine);
}
Map<String, Segment> segments = new HashMap<String, Segment>();
// first, go over and compute the search ones...
Searcher searcher = searcher();
try {
IndexReader[] readers = searcher.reader().getSequentialSubReaders();
for (IndexReader reader : readers) {
assert reader instanceof SegmentReader;
SegmentInfo info = Lucene.getSegmentInfo((SegmentReader) reader);
assert !segments.containsKey(info.name);
Segment segment = new Segment(info.name);
segment.search = true;
segment.docCount = reader.numDocs();
segment.delDocCount = reader.numDeletedDocs();
try {
segment.sizeInBytes = info.sizeInBytes(true);
} catch (IOException e) {
logger.trace("failed to get size for [{}]", e, info.name);
}
segments.put(info.name, segment);
}
} finally {
searcher.release();
}
// now, correlate or add the committed ones...
if (lastCommittedSegmentInfos != null) {
SegmentInfos infos = lastCommittedSegmentInfos;
for (SegmentInfo info : infos) {
Segment segment = segments.get(info.name);
if (segment == null) {
// committed but not (yet) searchable
segment = new Segment(info.name);
segment.search = false;
segment.committed = true;
segment.docCount = info.docCount;
try {
segment.delDocCount = indexWriter.numDeletedDocs(info);
} catch (IOException e) {
logger.trace("failed to get deleted docs for committed segment", e);
}
try {
segment.sizeInBytes = info.sizeInBytes(true);
} catch (IOException e) {
logger.trace("failed to get size for [{}]", e, info.name);
}
segments.put(info.name, segment);
} else {
segment.committed = true;
}
}
}
Segment[] segmentsArr = segments.values().toArray(new Segment[segments.values().size()]);
Arrays.sort(segmentsArr, new Comparator<Segment>() {
@Override public int compare(Segment o1, Segment o2) {
// BUGFIX: (int) (g1 - g2) truncates/overflows for large generations and can
// violate the Comparator contract; compare explicitly instead.
long g1 = o1.generation();
long g2 = o2.generation();
return g1 < g2 ? -1 : (g1 > g2 ? 1 : 0);
}
});
return Arrays.asList(segmentsArr);
} finally {
rwl.readLock().unlock();
}
}
// Shuts the engine down under the write lock so no in-flight operation overlaps teardown.
@Override public void close() throws ElasticSearchException {
rwl.writeLock().lock();
try {
innerClose();
} finally {
rwl.writeLock().unlock();
}
}
// One-shot engine failure: records the first cause, notifies listeners, closes the engine.
private void failEngine(Throwable failure) {
synchronized (failedEngineMutex) {
if (failedEngine != null) {
// a previous failure already won; keep its cause
return;
}
logger.warn("failed engine", failure);
failedEngine = failure;
for (FailedEngineListener listener : failedEngineListeners) {
listener.onFailedEngine(shardId, failure);
}
innerClose();
}
}
// Idempotent teardown (guarded by the closed flag): unregister the settings listener,
// clear in-memory state, force-close the NRT resource, and roll back the writer.
private void innerClose() {
if (closed) {
return;
}
indexSettingsService.removeListener(applySettings);
closed = true;
this.versionMap.clear();
this.failedEngineListeners.clear();
try {
if (nrtResource != null) {
this.nrtResource.forceClose();
}
// no need to commit in this case!, we snapshot before we close the shard, so translog and all sync'ed
if (indexWriter != null) {
try {
indexWriter.rollback();
} catch (AlreadyClosedException e) {
// ignore
}
}
} catch (Exception e) {
logger.debug("failed to rollback writer on close", e);
} finally {
// null the writer so every other path sees the engine as closed
indexWriter = null;
}
}
// Maps a document id to its striped lock object.
private Object dirtyLock(String id) {
    int hashed = id.hashCode();
    // guard Integer.MIN_VALUE: Math.abs cannot negate it
    if (hashed == Integer.MIN_VALUE) {
        hashed = 0;
    }
    return dirtyLocks[Math.abs(hashed) % dirtyLocks.length];
}
// Overload: lock by the uid term's text.
private Object dirtyLock(Term uid) {
return dirtyLock(uid.text());
}
// Fetches a document's version from the index itself (used when the version map has
// no entry). -1 = not found; -2 = present without a stored version.
private long loadCurrentVersionFromIndex(Term uid) {
UnicodeUtil.UTF8Result utf8 = Unicode.fromStringAsUtf8(uid.text());
Searcher searcher = searcher();
try {
for (IndexReader reader : searcher.searcher().subReaders()) {
// bloom filter avoids touching segments that can't contain this uid
BloomFilter filter = bloomCache.filter(reader, UidFieldMapper.NAME, asyncLoadBloomFilter);
// we know that its not there...
if (!filter.isPresent(utf8.result, 0, utf8.length)) {
continue;
}
long version = UidField.loadVersion(reader, uid);
// either -2 (its there, but no version associated), or an actual version
if (version != -1) {
return version;
}
}
return -1;
} finally {
searcher.release();
}
}
/**
 * Creates a new {@link IndexWriter} over the store's directory, configured
 * from the engine's current settings. Any stale write lock left by a previous
 * writer is forcibly released first.
 *
 * @throws IOException if the writer cannot be created; a partially
 *         constructed writer is closed before rethrowing
 */
private IndexWriter createWriter() throws IOException {
    IndexWriter indexWriter = null;
    try {
        // release locks when started
        if (IndexWriter.isLocked(store.directory())) {
            logger.warn("shard is locked, releasing lock");
            IndexWriter.unlock(store.directory());
        }
        // create a fresh index only when none exists yet; otherwise append
        boolean create = !IndexReader.indexExists(store.directory());
        IndexWriterConfig config = new IndexWriterConfig(Lucene.VERSION, analysisService.defaultIndexAnalyzer());
        config.setOpenMode(create ? IndexWriterConfig.OpenMode.CREATE : IndexWriterConfig.OpenMode.APPEND);
        config.setIndexDeletionPolicy(deletionPolicy);
        config.setMergeScheduler(mergeScheduler.newMergeScheduler());
        config.setMergePolicy(mergePolicyProvider.newMergePolicy());
        config.setSimilarity(similarityService.defaultIndexSimilarity());
        config.setRAMBufferSizeMB(indexingBufferSize.mbFrac());
        config.setTermIndexInterval(termIndexInterval);
        config.setReaderTermsIndexDivisor(termIndexDivisor);
        config.setMaxThreadStates(indexConcurrency);
        indexWriter = new IndexWriter(store.directory(), config);
    } catch (IOException e) {
        // don't leak a partially constructed writer
        safeClose(indexWriter);
        throw e;
    }
    return indexWriter;
}
// Register the index-level settings this engine can apply dynamically at
// runtime (consumed by ApplySettings#onRefreshSettings).
static {
    IndexMetaData.addDynamicSettings(
            "index.term_index_interval",
            "index.term_index_divisor",
            "index.index_concurrency",
            "index.gc_deletes"
    );
}
/**
 * Listener that applies dynamic index-setting changes to the running engine:
 * gc_deletes, term_index_interval, term_index_divisor and index_concurrency.
 * Settings that only take effect on a new writer/reader trigger a full flush.
 */
class ApplySettings implements IndexSettingsService.Listener {
    @Override public void onRefreshSettings(Settings settings) {
        long gcDeletesInMillis = indexSettings.getAsTime("index.gc_deletes", TimeValue.timeValueMillis(RobinEngine.this.gcDeletesInMillis)).millis();
        if (gcDeletesInMillis != RobinEngine.this.gcDeletesInMillis) {
            logger.info("updating index.gc_deletes from [{}] to [{}]", TimeValue.timeValueMillis(RobinEngine.this.gcDeletesInMillis), TimeValue.timeValueMillis(gcDeletesInMillis));
            RobinEngine.this.gcDeletesInMillis = gcDeletesInMillis;
        }
        int termIndexInterval = settings.getAsInt("index.term_index_interval", RobinEngine.this.termIndexInterval);
        int termIndexDivisor = settings.getAsInt("index.term_index_divisor", RobinEngine.this.termIndexDivisor); // IndexReader#DEFAULT_TERMS_INDEX_DIVISOR
        int indexConcurrency = settings.getAsInt("index.index_concurrency", RobinEngine.this.indexConcurrency);
        boolean requiresFlushing = false;
        // BUG FIX: index.index_concurrency was previously only examined when one of
        // the term-index settings changed too, so a standalone concurrency update
        // was silently dropped. Include it in the guard.
        if (termIndexInterval != RobinEngine.this.termIndexInterval
                || termIndexDivisor != RobinEngine.this.termIndexDivisor
                || indexConcurrency != RobinEngine.this.indexConcurrency) {
            // read lock is enough: we only mutate config on the current writer,
            // we just must not race with a flush swapping the writer
            rwl.readLock().lock();
            try {
                if (termIndexInterval != RobinEngine.this.termIndexInterval) {
                    logger.info("updating index.term_index_interval from [{}] to [{}]", RobinEngine.this.termIndexInterval, termIndexInterval);
                    RobinEngine.this.termIndexInterval = termIndexInterval;
                    indexWriter.getConfig().setTermIndexInterval(termIndexInterval);
                }
                if (termIndexDivisor != RobinEngine.this.termIndexDivisor) {
                    logger.info("updating index.term_index_divisor from [{}] to [{}]", RobinEngine.this.termIndexDivisor, termIndexDivisor);
                    RobinEngine.this.termIndexDivisor = termIndexDivisor;
                    indexWriter.getConfig().setReaderTermsIndexDivisor(termIndexDivisor);
                    // we want to apply this right now for readers, even "current" ones
                    requiresFlushing = true;
                }
                if (indexConcurrency != RobinEngine.this.indexConcurrency) {
                    logger.info("updating index.index_concurrency from [{}] to [{}]", RobinEngine.this.indexConcurrency, indexConcurrency);
                    RobinEngine.this.indexConcurrency = indexConcurrency;
                    // we have to flush in this case, since it only applies on a new index writer
                    requiresFlushing = true;
                }
            } finally {
                rwl.readLock().unlock();
            }
            if (requiresFlushing) {
                flush(new Flush().full(true));
            }
        }
    }
}
/**
 * Builds a fresh acquirable NRT (near-real-time) reader/searcher pair on top
 * of the given writer, using the default search similarity.
 */
private AcquirableResource<ReaderSearcherHolder> buildNrtResource(IndexWriter indexWriter) throws IOException {
    IndexReader nrtReader = IndexReader.open(indexWriter, true);
    ExtendedIndexSearcher nrtSearcher = new ExtendedIndexSearcher(nrtReader);
    nrtSearcher.setSimilarity(similarityService.defaultSearchSimilarity());
    return newAcquirableResource(new ReaderSearcherHolder(nrtSearcher));
}
/**
 * {@link Searcher} implementation backed by an acquired NRT holder; releasing
 * the searcher releases the underlying acquirable resource so it can be
 * closed once all users are done with it.
 */
private static class RobinSearchResult implements Searcher {
    // holder acquired by RobinEngine#searcher(); released exactly once in release()
    private final AcquirableResource<ReaderSearcherHolder> nrtHolder;
    private RobinSearchResult(AcquirableResource<ReaderSearcherHolder> nrtHolder) {
        this.nrtHolder = nrtHolder;
    }
    @Override public IndexReader reader() {
        return nrtHolder.resource().reader();
    }
    @Override public ExtendedIndexSearcher searcher() {
        return nrtHolder.resource().searcher();
    }
    @Override public boolean release() throws ElasticSearchException {
        nrtHolder.release();
        return true;
    }
}
/**
 * Immutable entry of the in-memory version map: a document's last known
 * version together with whether it is a delete tombstone, when it was
 * recorded, and where the producing operation sits in the translog.
 */
static class VersionValue {
    // last version assigned to the document
    private final long version;
    // true when this entry records a delete (tombstone)
    private final boolean delete;
    // timestamp (threadPool.estimatedTimeInMillis based) when this entry was recorded
    private final long time;
    // translog position of the operation that produced this version
    private final Translog.Location translogLocation;
    VersionValue(long version, boolean delete, long time, Translog.Location translogLocation) {
        this.version = version;
        this.delete = delete;
        this.time = time;
        this.translogLocation = translogLocation;
    }
    public long time() {
        return this.time;
    }
    public long version() {
        return version;
    }
    public boolean delete() {
        return delete;
    }
    public Translog.Location translogLocation() {
        return this.translogLocation;
    }
}
}
| private void innerCreate(Create create, IndexWriter writer) throws IOException {
synchronized (dirtyLock(create.uid())) {
UidField uidField = create.uidField();
if (create.origin() == Operation.Origin.RECOVERY) {
uidField.version(create.version());
// we use update doc and not addDoc since we might get duplicates when using transient translog
if (create.docs().size() > 1) {
writer.updateDocuments(create.uid(), create.docs(), create.analyzer());
} else {
writer.updateDocument(create.uid(), create.docs().get(0), create.analyzer());
}
Translog.Location translogLocation = translog.add(new Translog.Create(create));
// on recovery, we get the actual version we want to use
if (create.version() != 0) {
versionMap.put(create.uid().text(), new VersionValue(create.version(), false, threadPool.estimatedTimeInMillis(), translogLocation));
}
} else {
long currentVersion;
VersionValue versionValue = versionMap.get(create.uid().text());
if (versionValue == null) {
currentVersion = loadCurrentVersionFromIndex(create.uid());
} else {
if (versionValue.delete() && (threadPool.estimatedTimeInMillis() - versionValue.time()) > gcDeletesInMillis) {
currentVersion = -1; // deleted, and GC
} else {
currentVersion = versionValue.version();
}
}
// same logic as index
long updatedVersion;
if (create.origin() == Operation.Origin.PRIMARY) {
if (create.versionType() == VersionType.INTERNAL) { // internal version type
long expectedVersion = create.version();
if (expectedVersion != 0 && currentVersion != -2) { // -2 means we don't have a version, so ignore...
// an explicit version is provided, see if there is a conflict
// if the current version is -1, means we did not find anything, and
// a version is provided, so we do expect to find a doc under that version
// this is important, since we don't allow to preset a version in order to handle deletes
if (currentVersion == -1) {
throw new VersionConflictEngineException(shardId, create.type(), create.id(), -1, expectedVersion);
} else if (expectedVersion != currentVersion) {
throw new VersionConflictEngineException(shardId, create.type(), create.id(), currentVersion, expectedVersion);
}
}
updatedVersion = currentVersion < 0 ? 1 : currentVersion + 1;
} else { // external version type
// an external version is provided, just check, if a local version exists, that its higher than it
// the actual version checking is one in an external system, and we just want to not index older versions
if (currentVersion >= 0) { // we can check!, its there
if (currentVersion >= create.version()) {
throw new VersionConflictEngineException(shardId, create.type(), create.id(), currentVersion, create.version());
}
}
updatedVersion = create.version();
}
} else { // if (index.origin() == Operation.Origin.REPLICA) {
long expectedVersion = create.version();
if (currentVersion != -2) { // -2 means we don't have a version, so ignore...
// if it does not exists, and its considered the first index operation (replicas are 1 of)
// then nothing to do
if (!(currentVersion == -1 && create.version() == 1)) {
// with replicas, we only check for previous version, we allow to set a future version
if (expectedVersion <= currentVersion) {
throw new VersionConflictEngineException(shardId, create.type(), create.id(), currentVersion, expectedVersion);
}
}
}
// replicas already hold the "future" version
updatedVersion = create.version();
}
// if the doc does not exists or it exists but not delete
if (versionValue != null) {
if (!versionValue.delete()) {
throw new DocumentAlreadyExistsEngineException(shardId, create.type(), create.id());
}
} else if (currentVersion != -1) {
// its not deleted, its already there
throw new DocumentAlreadyExistsEngineException(shardId, create.type(), create.id());
}
uidField.version(updatedVersion);
create.version(updatedVersion);
if (create.docs().size() > 1) {
writer.addDocuments(create.docs(), create.analyzer());
} else {
writer.addDocument(create.docs().get(0), create.analyzer());
}
Translog.Location translogLocation = translog.add(new Translog.Create(create));
versionMap.put(create.uid().text(), new VersionValue(updatedVersion, false, threadPool.estimatedTimeInMillis(), translogLocation));
}
}
}
/**
 * Indexes (adds or updates) a document. Runs under the read lock so it can
 * proceed concurrently with other dirty operations but not with a full
 * flush or close.
 *
 * @throws IndexFailedEngineException when the underlying write fails; on
 *         OutOfMemoryError the whole engine is failed first
 */
@Override public void index(Index index) throws EngineException {
    rwl.readLock().lock();
    try {
        IndexWriter writer = this.indexWriter;
        if (writer == null) {
            throw new EngineClosedException(shardId, failedEngine);
        }
        innerIndex(index, writer);
        // mark state so refresh/merge/flush logic knows there is new work
        dirty = true;
        possibleMergeNeeded = true;
        flushNeeded = true;
    } catch (IOException e) {
        throw new IndexFailedEngineException(shardId, index, e);
    } catch (OutOfMemoryError e) {
        // OOME leaves Lucene in an undefined state; fail the engine so the shard is reallocated
        failEngine(e);
        throw new IndexFailedEngineException(shardId, index, e);
    } finally {
        rwl.readLock().unlock();
    }
}
/**
 * Performs an index (add-or-update) operation under the per-uid stripe lock.
 * On recovery the translog version is applied verbatim; otherwise the
 * version is resolved from the version map (falling back to the index),
 * conflict-checked per origin/version type, the doc is written (add when no
 * prior doc exists, update otherwise), and translog + version map updated.
 *
 * @throws VersionConflictEngineException on a version conflict
 */
private void innerIndex(Index index, IndexWriter writer) throws IOException {
    synchronized (dirtyLock(index.uid())) {
        UidField uidField = index.uidField();
        if (index.origin() == Operation.Origin.RECOVERY) {
            uidField.version(index.version());
            // updateDocument (not add) to be safe against duplicates replayed from a transient translog
            if (index.docs().size() > 1) {
                writer.updateDocuments(index.uid(), index.docs(), index.analyzer());
            } else {
                writer.updateDocument(index.uid(), index.docs().get(0), index.analyzer());
            }
            Translog.Location translogLocation = translog.add(new Translog.Index(index));
            // on recovery, we get the actual version we want to use
            if (index.version() != 0) {
                versionMap.put(index.uid().text(), new VersionValue(index.version(), false, threadPool.estimatedTimeInMillis(), translogLocation));
            }
        } else {
            // resolve current version: in-memory map first, then the index itself
            long currentVersion;
            VersionValue versionValue = versionMap.get(index.uid().text());
            if (versionValue == null) {
                currentVersion = loadCurrentVersionFromIndex(index.uid());
            } else {
                if (versionValue.delete() && (threadPool.estimatedTimeInMillis() - versionValue.time()) > gcDeletesInMillis) {
                    currentVersion = -1; // deleted, and GC
                } else {
                    currentVersion = versionValue.version();
                }
            }
            long updatedVersion;
            if (index.origin() == Operation.Origin.PRIMARY) {
                if (index.versionType() == VersionType.INTERNAL) { // internal version type
                    long expectedVersion = index.version();
                    if (expectedVersion != 0 && currentVersion != -2) { // -2 means we don't have a version, so ignore...
                        // an explicit version is provided, see if there is a conflict
                        // if the current version is -1, means we did not find anything, and
                        // a version is provided, so we do expect to find a doc under that version
                        // this is important, since we don't allow to preset a version in order to handle deletes
                        if (currentVersion == -1) {
                            throw new VersionConflictEngineException(shardId, index.type(), index.id(), -1, expectedVersion);
                        } else if (expectedVersion != currentVersion) {
                            throw new VersionConflictEngineException(shardId, index.type(), index.id(), currentVersion, expectedVersion);
                        }
                    }
                    updatedVersion = currentVersion < 0 ? 1 : currentVersion + 1;
                } else { // external version type
                    // an external version is provided, just check, if a local version exists, that its higher than it
                    // the actual version checking is one in an external system, and we just want to not index older versions
                    if (currentVersion >= 0) { // we can check!, its there
                        if (currentVersion >= index.version()) {
                            throw new VersionConflictEngineException(shardId, index.type(), index.id(), currentVersion, index.version());
                        }
                    }
                    updatedVersion = index.version();
                }
            } else { // if (index.origin() == Operation.Origin.REPLICA) {
                long expectedVersion = index.version();
                if (currentVersion != -2) { // -2 means we don't have a version, so ignore...
                    // if it does not exists, and its considered the first index operation (replicas are 1 of)
                    // then nothing to do
                    if (!(currentVersion == -1 && index.version() == 1)) {
                        // with replicas, we only check for previous version, we allow to set a future version
                        if (expectedVersion <= currentVersion) {
                            throw new VersionConflictEngineException(shardId, index.type(), index.id(), currentVersion, expectedVersion);
                        }
                    }
                }
                // replicas already hold the "future" version
                updatedVersion = index.version();
            }
            uidField.version(updatedVersion);
            index.version(updatedVersion);
            if (currentVersion == -1) {
                // document does not exists, we can optimize for create
                if (index.docs().size() > 1) {
                    writer.addDocuments(index.docs(), index.analyzer());
                } else {
                    writer.addDocument(index.docs().get(0), index.analyzer());
                }
            } else {
                if (index.docs().size() > 1) {
                    writer.updateDocuments(index.uid(), index.docs(), index.analyzer());
                } else {
                    writer.updateDocument(index.uid(), index.docs().get(0), index.analyzer());
                }
            }
            Translog.Location translogLocation = translog.add(new Translog.Index(index));
            versionMap.put(index.uid().text(), new VersionValue(updatedVersion, false, threadPool.estimatedTimeInMillis(), translogLocation));
        }
    }
}
/**
 * Deletes a single document by uid. Runs under the read lock so it can
 * proceed concurrently with other dirty operations but not with a full
 * flush or close.
 *
 * @throws DeleteFailedEngineException when the underlying delete fails; on
 *         OutOfMemoryError the whole engine is failed first
 */
@Override public void delete(Delete delete) throws EngineException {
    rwl.readLock().lock();
    try {
        IndexWriter writer = this.indexWriter;
        if (writer == null) {
            throw new EngineClosedException(shardId, failedEngine);
        }
        innerDelete(delete, writer);
        // mark state so refresh/merge/flush logic knows there is new work
        dirty = true;
        possibleMergeNeeded = true;
        flushNeeded = true;
    } catch (IOException e) {
        throw new DeleteFailedEngineException(shardId, delete, e);
    } catch (OutOfMemoryError e) {
        // OOME leaves Lucene in an undefined state; fail the engine so the shard is reallocated
        failEngine(e);
        throw new DeleteFailedEngineException(shardId, delete, e);
    } finally {
        rwl.readLock().unlock();
    }
}
/**
 * Performs a delete under the per-uid stripe lock. On recovery the translog
 * version is applied verbatim; otherwise the current version is resolved,
 * conflict-checked per origin/version type, and the delete applied. Deletes
 * are recorded in the version map as tombstones so later operations see them
 * until they are GC'ed (see gcDeletesInMillis).
 *
 * @throws VersionConflictEngineException on a version conflict
 */
private void innerDelete(Delete delete, IndexWriter writer) throws IOException {
    synchronized (dirtyLock(delete.uid())) {
        if (delete.origin() == Operation.Origin.RECOVERY) {
            writer.deleteDocuments(delete.uid());
            Translog.Location translogLocation = translog.add(new Translog.Delete(delete));
            // update the version with the exact version from recovery, assuming we have it
            if (delete.version() != 0) {
                versionMap.put(delete.uid().text(), new VersionValue(delete.version(), true, threadPool.estimatedTimeInMillis(), translogLocation));
            }
        } else {
            // resolve current version: in-memory map first, then the index itself
            long currentVersion;
            VersionValue versionValue = versionMap.get(delete.uid().text());
            if (versionValue == null) {
                currentVersion = loadCurrentVersionFromIndex(delete.uid());
            } else {
                if (versionValue.delete() && (threadPool.estimatedTimeInMillis() - versionValue.time()) > gcDeletesInMillis) {
                    currentVersion = -1; // deleted, and GC
                } else {
                    currentVersion = versionValue.version();
                }
            }
            long updatedVersion;
            if (delete.origin() == Operation.Origin.PRIMARY) {
                if (delete.versionType() == VersionType.INTERNAL) { // internal version type
                    if (delete.version() != 0 && currentVersion != -2) { // -2 means we don't have a version, so ignore...
                        // an explicit version is provided, see if there is a conflict
                        // if the current version is -1, means we did not find anything, and
                        // a version is provided, so we do expect to find a doc under that version
                        if (currentVersion == -1) {
                            throw new VersionConflictEngineException(shardId, delete.type(), delete.id(), -1, delete.version());
                        } else if (delete.version() != currentVersion) {
                            throw new VersionConflictEngineException(shardId, delete.type(), delete.id(), currentVersion, delete.version());
                        }
                    }
                    updatedVersion = currentVersion < 0 ? 1 : currentVersion + 1;
                } else { // External
                    if (currentVersion == -1) {
                        throw new VersionConflictEngineException(shardId, delete.type(), delete.id(), -1, delete.version());
                    } else if (currentVersion >= delete.version()) {
                        throw new VersionConflictEngineException(shardId, delete.type(), delete.id(), currentVersion, delete.version());
                    }
                    updatedVersion = delete.version();
                }
            } else { // if (delete.origin() == Operation.Origin.REPLICA) {
                // on replica, the version is the future value expected (returned from the operation on the primary)
                if (currentVersion != -2) { // -2 means we don't have a version in the index, ignore
                    // only check if we have a version for it, otherwise, ignore (see later)
                    if (currentVersion != -1) {
                        // with replicas, we only check for previous version, we allow to set a future version
                        if (delete.version() <= currentVersion) {
                            // NOTE(review): the conflict reports currentVersion - 1, unlike every
                            // other site which reports currentVersion — looks deliberate but confirm
                            throw new VersionConflictEngineException(shardId, delete.type(), delete.id(), currentVersion - 1, delete.version());
                        }
                    }
                }
                // replicas already hold the "future" version
                updatedVersion = delete.version();
            }
            if (currentVersion == -1) {
                // if the doc does not exists, just update with doc 0
                delete.version(0).notFound(true);
            } else if (versionValue != null && versionValue.delete()) {
                // if its a delete on delete and we have the current delete version, return it
                delete.version(versionValue.version()).notFound(true);
            } else {
                delete.version(updatedVersion);
                writer.deleteDocuments(delete.uid());
                Translog.Location translogLocation = translog.add(new Translog.Delete(delete));
                versionMap.put(delete.uid().text(), new VersionValue(updatedVersion, true, threadPool.estimatedTimeInMillis(), translogLocation));
            }
        }
    }
}
/**
 * Deletes all documents matching the given query (optionally narrowed by an
 * alias filter). After the delete, the version map is refreshed since the
 * deleted uids are unknown and stale version entries must be cleared.
 *
 * @throws DeleteByQueryFailedEngineException when the underlying delete fails
 */
@Override public void delete(DeleteByQuery delete) throws EngineException {
    rwl.readLock().lock();
    try {
        IndexWriter writer = this.indexWriter;
        if (writer == null) {
            // CONSISTENCY FIX: include the failure cause, as every other
            // operation does when the engine is closed.
            throw new EngineClosedException(shardId, failedEngine);
        }
        Query query;
        if (delete.aliasFilter() == null) {
            query = delete.query();
        } else {
            query = new FilteredQuery(delete.query(), delete.aliasFilter());
        }
        writer.deleteDocuments(query);
        translog.add(new Translog.DeleteByQuery(delete));
        dirty = true;
        possibleMergeNeeded = true;
        flushNeeded = true;
    } catch (IOException e) {
        throw new DeleteByQueryFailedEngineException(shardId, delete, e);
    } finally {
        rwl.readLock().unlock();
    }
    //TODO: This is heavy, since we refresh, but we really have to...
    // NOTE(review): uses System.currentTimeMillis() while other call sites use
    // threadPool.estimatedTimeInMillis() — presumably close enough, confirm.
    refreshVersioningTable(System.currentTimeMillis());
}
/**
 * Acquires the current NRT searcher. Spins (yielding) until an acquire
 * succeeds, which guards against racing with a concurrent swap of the NRT
 * resource during refresh/flush.
 */
@Override public Searcher searcher() throws EngineException {
    AcquirableResource<ReaderSearcherHolder> holder = this.nrtResource;
    while (!holder.acquire()) {
        // the holder was swapped/closed under us; yield and retry with the latest one
        Thread.yield();
        holder = this.nrtResource;
    }
    return new RobinSearchResult(holder);
}
// True when changes were made since the last refresh (searcher swap).
@Override public boolean refreshNeeded() {
    return dirty;
}
// True when a segment merge might be beneficial (set after each dirty operation).
@Override public boolean possibleMergeNeeded() {
    return this.possibleMergeNeeded;
}
/**
 * Refreshes the NRT searcher so recent changes become visible to search.
 * Runs under the read lock (a full flush swaps the writer under the write
 * lock) and serializes concurrent refreshes via {@code refreshMutex} to
 * avoid leaking dangling readers.
 */
@Override public void refresh(Refresh refresh) throws EngineException {
    if (indexWriter == null) {
        throw new EngineClosedException(shardId);
    }
    // we obtain a read lock here, since we don't want a flush to happen while we are refreshing
    // since it flushes the index as well (though, in terms of concurrency, we are allowed to do it)
    rwl.readLock().lock();
    try {
        // this engine always acts as if waitForOperations=true
        IndexWriter currentWriter = indexWriter;
        if (currentWriter == null) {
            throw new EngineClosedException(shardId, failedEngine);
        }
        try {
            // we need to obtain a mutex here, to make sure we don't leave dangling readers
            // we could have used an AtomicBoolean#compareAndSet, but, then we might miss refresh requests
            // compared to on going ones
            synchronized (refreshMutex) {
                if (dirty || refresh.force()) {
                    dirty = false;
                    AcquirableResource<ReaderSearcherHolder> current = nrtResource;
                    IndexReader newReader = current.resource().reader().reopen(true);
                    // reopen returns the same instance when nothing changed; only swap on a new reader
                    if (newReader != current.resource().reader()) {
                        ExtendedIndexSearcher indexSearcher = new ExtendedIndexSearcher(newReader);
                        indexSearcher.setSimilarity(similarityService.defaultSearchSimilarity());
                        nrtResource = newAcquirableResource(new ReaderSearcherHolder(indexSearcher));
                        // old resource is closed once all outstanding acquirers release it
                        current.markForClose();
                    }
                }
            }
        } catch (AlreadyClosedException e) {
            // an index writer got replaced on us, ignore
        } catch (Exception e) {
            if (indexWriter == null) {
                throw new EngineClosedException(shardId, failedEngine);
            } else if (currentWriter != indexWriter) {
                // an index writer got replaced on us, ignore
            } else {
                throw new RefreshFailedEngineException(shardId, e);
            }
        } catch (OutOfMemoryError e) {
            failEngine(e);
            throw new RefreshFailedEngineException(shardId, e);
        }
    } finally {
        rwl.readLock().unlock();
    }
}
/**
 * Flushes the engine: commits the index with a fresh translog id and rolls
 * the translog. A full flush additionally closes and recreates the writer
 * (under the write lock) and rebuilds the NRT resource; a regular flush runs
 * under the read lock and uses a transient translog that is made current (or
 * reverted) depending on whether the commit actually happened. Only one
 * flush may run at a time, and flushing is disabled during recovery.
 *
 * @throws FlushNotAllowedEngineException when recovery is in progress or a flush is already running
 * @throws FlushFailedEngineException when the commit itself fails
 */
@Override public void flush(Flush flush) throws EngineException {
    if (indexWriter == null) {
        throw new EngineClosedException(shardId, failedEngine);
    }
    // check outside the lock as well so we can check without blocking on the write lock
    if (disableFlushCounter > 0) {
        throw new FlushNotAllowedEngineException(shardId, "Recovery is in progress, flush is not allowed");
    }
    // don't allow for concurrent flush operations...
    if (!flushing.compareAndSet(false, true)) {
        throw new FlushNotAllowedEngineException(shardId, "Already flushing...");
    }
    try {
        if (flush.full()) {
            rwl.writeLock().lock();
            try {
                // re-check under the lock; state may have changed while waiting
                if (indexWriter == null) {
                    throw new EngineClosedException(shardId, failedEngine);
                }
                if (disableFlushCounter > 0) {
                    throw new FlushNotAllowedEngineException(shardId, "Recovery is in progress, flush is not allowed");
                }
                // disable refreshing, not dirty
                dirty = false;
                try {
                    // that's ok if the index writer failed and is in inconsistent state
                    // we will get an exception on a dirty operation, and will cause the shard
                    // to be allocated to a different node
                    indexWriter.close(false);
                    indexWriter = createWriter();
                    if (flushNeeded || flush.force()) {
                        flushNeeded = false;
                        long translogId = translogIdGenerator.incrementAndGet();
                        // record the translog id in the commit so recovery can pair commit + translog
                        indexWriter.commit(MapBuilder.<String, String>newMapBuilder().put(Translog.TRANSLOG_ID_KEY, Long.toString(translogId)).map());
                        translog.newTranslog(translogId);
                    }
                    AcquirableResource<ReaderSearcherHolder> current = nrtResource;
                    nrtResource = buildNrtResource(indexWriter);
                    current.markForClose();
                } catch (Exception e) {
                    throw new FlushFailedEngineException(shardId, e);
                } catch (OutOfMemoryError e) {
                    failEngine(e);
                    throw new FlushFailedEngineException(shardId, e);
                }
            } finally {
                rwl.writeLock().unlock();
            }
        } else {
            rwl.readLock().lock();
            try {
                if (indexWriter == null) {
                    throw new EngineClosedException(shardId, failedEngine);
                }
                if (disableFlushCounter > 0) {
                    throw new FlushNotAllowedEngineException(shardId, "Recovery is in progress, flush is not allowed");
                }
                if (flushNeeded || flush.force()) {
                    flushNeeded = false;
                    try {
                        long translogId = translogIdGenerator.incrementAndGet();
                        translog.newTransientTranslog(translogId);
                        indexWriter.commit(MapBuilder.<String, String>newMapBuilder().put(Translog.TRANSLOG_ID_KEY, Long.toString(translogId)).map());
                        if (flush.force()) {
                            // if we force, we might not have committed, we need to check that its the same id
                            Map<String, String> commitUserData = IndexReader.getCommitUserData(store.directory());
                            long committedTranslogId = Long.parseLong(commitUserData.get(Translog.TRANSLOG_ID_KEY));
                            if (committedTranslogId != translogId) {
                                // we did not commit anything, revert to the old translog
                                translog.revertTransient();
                            } else {
                                translog.makeTransientCurrent();
                            }
                        } else {
                            translog.makeTransientCurrent();
                        }
                    } catch (Exception e) {
                        translog.revertTransient();
                        throw new FlushFailedEngineException(shardId, e);
                    } catch (OutOfMemoryError e) {
                        translog.revertTransient();
                        failEngine(e);
                        throw new FlushFailedEngineException(shardId, e);
                    }
                }
            } finally {
                rwl.readLock().unlock();
            }
        }
        // changes are now durable: prune stale entries from the in-memory version map
        refreshVersioningTable(threadPool.estimatedTimeInMillis());
        try {
            // cache the committed segment infos for segments() reporting
            SegmentInfos infos = new SegmentInfos();
            infos.read(store.directory());
            lastCommittedSegmentInfos = infos;
        } catch (Exception e) {
            logger.warn("failed to read latest segment infos on flush", e);
        }
    } finally {
        flushing.set(false);
    }
}
/**
 * Prunes the in-memory version map after forcing a refresh: entries recorded
 * before {@code time} are now searchable from the index itself, so they can
 * be dropped. Delete tombstones are retained for {@code gcDeletesInMillis}
 * before being garbage collected.
 */
private void refreshVersioningTable(long time) {
    // we need to refresh in order to clear older version values
    refresh(new Refresh(true).force(true));
    for (Map.Entry<String, VersionValue> entry : versionMap.entrySet()) {
        String id = entry.getKey();
        synchronized (dirtyLock(id)) { // can we do it without this lock on each value? maybe batch to a set and get the lock once per set?
            // re-read under the lock; the entry may have changed or been removed
            VersionValue versionValue = versionMap.get(id);
            if (versionValue == null) {
                continue;
            }
            if (time - versionValue.time() <= 0) {
                continue; // its a newer value, from after/during we refreshed, don't clear it
            }
            if (versionValue.delete()) {
                // keep delete tombstones around long enough for out-of-order operations
                if ((time - versionValue.time()) > gcDeletesInMillis) {
                    versionMap.remove(id);
                }
            } else {
                versionMap.remove(id);
            }
        }
    }
}
/**
 * Triggers a merge check on the writer when earlier operations flagged that
 * a merge may be needed. For EnableMergePolicy-based policies, merging is
 * enabled only for the duration of this call and disabled again afterwards.
 */
@Override public void maybeMerge() throws EngineException {
    if (!possibleMergeNeeded) {
        return;
    }
    possibleMergeNeeded = false;
    rwl.readLock().lock();
    try {
        if (indexWriter == null) {
            throw new EngineClosedException(shardId, failedEngine);
        }
        if (indexWriter.getConfig().getMergePolicy() instanceof EnableMergePolicy) {
            ((EnableMergePolicy) indexWriter.getConfig().getMergePolicy()).enableMerge();
        }
        indexWriter.maybeMerge();
    } catch (Exception e) {
        throw new OptimizeFailedEngineException(shardId, e);
    } catch (OutOfMemoryError e) {
        // OOME leaves Lucene in an undefined state; fail the whole engine
        failEngine(e);
        throw new OptimizeFailedEngineException(shardId, e);
    } finally {
        rwl.readLock().unlock();
        // always turn merging back off, even on failure
        if (indexWriter != null && indexWriter.getConfig().getMergePolicy() instanceof EnableMergePolicy) {
            ((EnableMergePolicy) indexWriter.getConfig().getMergePolicy()).disableMerge();
        }
    }
}
/**
 * Optimizes (force-merges) the index per the request: expunge deletes only,
 * a plain maybeMerge, or a merge down to maxNumSegments. Optionally flushes
 * before/after and refreshes afterwards. Only one optimize runs at a time
 * (optimizeMutex); waiting for merges happens outside the read lock.
 */
@Override public void optimize(Optimize optimize) throws EngineException {
    if (optimize.flush()) {
        flush(new Flush().force(true));
    }
    // skip entirely if another optimize is already running
    if (optimizeMutex.compareAndSet(false, true)) {
        rwl.readLock().lock();
        try {
            if (indexWriter == null) {
                throw new EngineClosedException(shardId, failedEngine);
            }
            if (indexWriter.getConfig().getMergePolicy() instanceof EnableMergePolicy) {
                ((EnableMergePolicy) indexWriter.getConfig().getMergePolicy()).enableMerge();
            }
            if (optimize.onlyExpungeDeletes()) {
                indexWriter.expungeDeletes(false);
            } else if (optimize.maxNumSegments() <= 0) {
                indexWriter.maybeMerge();
                possibleMergeNeeded = false;
            } else {
                indexWriter.optimize(optimize.maxNumSegments(), false);
            }
        } catch (Exception e) {
            throw new OptimizeFailedEngineException(shardId, e);
        } catch (OutOfMemoryError e) {
            // OOME leaves Lucene in an undefined state; fail the whole engine
            failEngine(e);
            throw new OptimizeFailedEngineException(shardId, e);
        } finally {
            rwl.readLock().unlock();
            // always turn merging back off, even on failure
            if (indexWriter != null && indexWriter.getConfig().getMergePolicy() instanceof EnableMergePolicy) {
                ((EnableMergePolicy) indexWriter.getConfig().getMergePolicy()).disableMerge();
            }
            optimizeMutex.set(false);
        }
    }
    // wait for the merges outside of the read lock
    if (optimize.waitForMerge()) {
        indexWriter.waitForMerges();
    }
    if (optimize.flush()) {
        flush(new Flush().force(true));
    }
    if (optimize.refresh()) {
        refresh(new Refresh(false).force(true));
    }
}
/**
 * Runs the given handler against a consistent pair of snapshots: a snapshot
 * of the last index commit plus a translog snapshot, both taken together
 * under the read lock. The handler runs outside the lock; both snapshots are
 * always released afterwards.
 */
@Override public <T> T snapshot(SnapshotHandler<T> snapshotHandler) throws EngineException {
    SnapshotIndexCommit commitSnapshot = null;
    Translog.Snapshot translogSnapshot = null;
    rwl.readLock().lock();
    try {
        commitSnapshot = deletionPolicy.snapshot();
        translogSnapshot = translog.snapshot();
    } catch (Exception e) {
        // release the commit snapshot when the translog snapshot failed
        if (commitSnapshot != null) {
            commitSnapshot.release();
        }
        throw new SnapshotFailedEngineException(shardId, e);
    } finally {
        rwl.readLock().unlock();
    }
    try {
        // run the handler outside the lock; the snapshots keep the data pinned
        return snapshotHandler.snapshot(commitSnapshot, translogSnapshot);
    } finally {
        commitSnapshot.release();
        translogSnapshot.release();
    }
}
/**
 * Runs the three-phase recovery protocol. Flushing is disabled for the whole
 * duration (disableFlushCounter): phase 1 streams a snapshot of the last
 * index commit, phase 2 streams a translog snapshot taken afterwards, and
 * phase 3 streams the translog delta accumulated during phase 2 while
 * holding the write lock so no new operations slip in.
 */
@Override public void recover(RecoveryHandler recoveryHandler) throws EngineException {
    // take a write lock here so it won't happen while a flush is in progress
    // this means that next commits will not be allowed once the lock is released
    rwl.writeLock().lock();
    try {
        disableFlushCounter++;
    } finally {
        rwl.writeLock().unlock();
    }
    SnapshotIndexCommit phase1Snapshot;
    try {
        phase1Snapshot = deletionPolicy.snapshot();
    } catch (Exception e) {
        // undo the flush-disable on every failure path
        --disableFlushCounter;
        throw new RecoveryEngineException(shardId, 1, "Snapshot failed", e);
    }
    try {
        recoveryHandler.phase1(phase1Snapshot);
    } catch (Exception e) {
        --disableFlushCounter;
        phase1Snapshot.release();
        throw new RecoveryEngineException(shardId, 1, "Execution failed", e);
    }
    Translog.Snapshot phase2Snapshot;
    try {
        phase2Snapshot = translog.snapshot();
    } catch (Exception e) {
        --disableFlushCounter;
        phase1Snapshot.release();
        throw new RecoveryEngineException(shardId, 2, "Snapshot failed", e);
    }
    try {
        recoveryHandler.phase2(phase2Snapshot);
    } catch (Exception e) {
        --disableFlushCounter;
        phase1Snapshot.release();
        phase2Snapshot.release();
        throw new RecoveryEngineException(shardId, 2, "Execution failed", e);
    }
    // phase 3 runs under the write lock: only the translog delta since phase 2 remains
    rwl.writeLock().lock();
    Translog.Snapshot phase3Snapshot = null;
    try {
        phase3Snapshot = translog.snapshot(phase2Snapshot);
        recoveryHandler.phase3(phase3Snapshot);
    } catch (Exception e) {
        throw new RecoveryEngineException(shardId, 3, "Execution failed", e);
    } finally {
        --disableFlushCounter;
        rwl.writeLock().unlock();
        phase1Snapshot.release();
        phase2Snapshot.release();
        if (phase3Snapshot != null) {
            phase3Snapshot.release();
        }
    }
}
/**
 * Reports the shard's segments: first the segments visible to search (from
 * the current NRT searcher), then correlated with / augmented by the last
 * committed segment infos, sorted by generation.
 */
@Override public List<Segment> segments() {
    rwl.readLock().lock();
    try {
        IndexWriter indexWriter = this.indexWriter;
        if (indexWriter == null) {
            throw new EngineClosedException(shardId, failedEngine);
        }
        Map<String, Segment> segments = new HashMap<String, Segment>();
        // first, go over and compute the search ones...
        Searcher searcher = searcher();
        try {
            IndexReader[] readers = searcher.reader().getSequentialSubReaders();
            for (IndexReader reader : readers) {
                assert reader instanceof SegmentReader;
                SegmentInfo info = Lucene.getSegmentInfo((SegmentReader) reader);
                assert !segments.containsKey(info.name);
                Segment segment = new Segment(info.name);
                segment.search = true;
                segment.docCount = reader.numDocs();
                segment.delDocCount = reader.numDeletedDocs();
                try {
                    segment.sizeInBytes = info.sizeInBytes(true);
                } catch (IOException e) {
                    logger.trace("failed to get size for [{}]", e, info.name);
                }
                segments.put(info.name, segment);
            }
        } finally {
            searcher.release();
        }
        // now, correlate or add the committed ones...
        if (lastCommittedSegmentInfos != null) {
            SegmentInfos infos = lastCommittedSegmentInfos;
            for (SegmentInfo info : infos) {
                Segment segment = segments.get(info.name);
                if (segment == null) {
                    segment = new Segment(info.name);
                    segment.search = false;
                    segment.committed = true;
                    segment.docCount = info.docCount;
                    try {
                        segment.delDocCount = indexWriter.numDeletedDocs(info);
                    } catch (IOException e) {
                        logger.trace("failed to get deleted docs for committed segment", e);
                    }
                    try {
                        segment.sizeInBytes = info.sizeInBytes(true);
                    } catch (IOException e) {
                        logger.trace("failed to get size for [{}]", e, info.name);
                    }
                    segments.put(info.name, segment);
                } else {
                    segment.committed = true;
                }
            }
        }
        Segment[] segmentsArr = segments.values().toArray(new Segment[segments.values().size()]);
        Arrays.sort(segmentsArr, new Comparator<Segment>() {
            @Override public int compare(Segment o1, Segment o2) {
                // BUG FIX: the previous (int) cast of a long difference could
                // overflow and produce an inconsistent ordering; compare explicitly.
                long g1 = o1.generation();
                long g2 = o2.generation();
                return g1 < g2 ? -1 : (g1 == g2 ? 0 : 1);
            }
        });
        return Arrays.asList(segmentsArr);
    } finally {
        rwl.readLock().unlock();
    }
}
/**
 * Closes the engine. Takes the write lock exclusively so close waits for all
 * in-flight operations (which run under the read lock) to complete.
 */
@Override public void close() throws ElasticSearchException {
    final java.util.concurrent.locks.Lock writeLock = rwl.writeLock();
    writeLock.lock();
    try {
        innerClose();
    } finally {
        writeLock.unlock();
    }
}
/**
 * Marks the engine as failed with the given cause, notifies all registered
 * {@link FailedEngineListener}s, and closes the engine. Only the first
 * failure is recorded; subsequent calls are no-ops.
 */
private void failEngine(Throwable failure) {
    synchronized (failedEngineMutex) {
        // only the first failure wins; later failures are ignored
        if (failedEngine != null) {
            return;
        }
        logger.warn("failed engine", failure);
        failedEngine = failure;
        for (FailedEngineListener listener : failedEngineListeners) {
            listener.onFailedEngine(shardId, failure);
        }
        innerClose();
    }
}
/**
 * Releases the engine's resources: the NRT reader/searcher and the index
 * writer. Idempotent; callers must hold the write lock or the
 * failed-engine mutex so no concurrent operations are in flight.
 */
private void innerClose() {
    if (closed) {
        return;
    }
    indexSettingsService.removeListener(applySettings);
    closed = true;
    this.versionMap.clear();
    this.failedEngineListeners.clear();
    try {
        if (nrtResource != null) {
            this.nrtResource.forceClose();
        }
        // no need to commit in this case!, we snapshot before we close the shard, so translog and all sync'ed
        if (indexWriter != null) {
            try {
                // rollback also closes the writer, discarding uncommitted changes
                indexWriter.rollback();
            } catch (AlreadyClosedException e) {
                // ignore
            }
        }
    } catch (Exception e) {
        logger.debug("failed to rollback writer on close", e);
    } finally {
        indexWriter = null;
    }
}
private Object dirtyLock(String id) {
int hash = id.hashCode();
// abs returns Integer.MIN_VALUE, so we need to protect against it...
if (hash == Integer.MIN_VALUE) {
hash = 0;
}
return dirtyLocks[Math.abs(hash) % dirtyLocks.length];
}
private Object dirtyLock(Term uid) {
return dirtyLock(uid.text());
}
private long loadCurrentVersionFromIndex(Term uid) {
UnicodeUtil.UTF8Result utf8 = Unicode.fromStringAsUtf8(uid.text());
Searcher searcher = searcher();
try {
for (IndexReader reader : searcher.searcher().subReaders()) {
BloomFilter filter = bloomCache.filter(reader, UidFieldMapper.NAME, asyncLoadBloomFilter);
// we know that its not there...
if (!filter.isPresent(utf8.result, 0, utf8.length)) {
continue;
}
long version = UidField.loadVersion(reader, uid);
// either -2 (its there, but no version associated), or an actual version
if (version != -1) {
return version;
}
}
return -1;
} finally {
searcher.release();
}
}
private IndexWriter createWriter() throws IOException {
IndexWriter indexWriter = null;
try {
// release locks when started
if (IndexWriter.isLocked(store.directory())) {
logger.warn("shard is locked, releasing lock");
IndexWriter.unlock(store.directory());
}
boolean create = !IndexReader.indexExists(store.directory());
IndexWriterConfig config = new IndexWriterConfig(Lucene.VERSION, analysisService.defaultIndexAnalyzer());
config.setOpenMode(create ? IndexWriterConfig.OpenMode.CREATE : IndexWriterConfig.OpenMode.APPEND);
config.setIndexDeletionPolicy(deletionPolicy);
config.setMergeScheduler(mergeScheduler.newMergeScheduler());
config.setMergePolicy(mergePolicyProvider.newMergePolicy());
config.setSimilarity(similarityService.defaultIndexSimilarity());
config.setRAMBufferSizeMB(indexingBufferSize.mbFrac());
config.setTermIndexInterval(termIndexInterval);
config.setReaderTermsIndexDivisor(termIndexDivisor);
config.setMaxThreadStates(indexConcurrency);
indexWriter = new IndexWriter(store.directory(), config);
} catch (IOException e) {
safeClose(indexWriter);
throw e;
}
return indexWriter;
}
static {
IndexMetaData.addDynamicSettings(
"index.term_index_interval",
"index.term_index_divisor",
"index.index_concurrency",
"index.gc_deletes"
);
}
class ApplySettings implements IndexSettingsService.Listener {
@Override public void onRefreshSettings(Settings settings) {
long gcDeletesInMillis = indexSettings.getAsTime("index.gc_deletes", TimeValue.timeValueMillis(RobinEngine.this.gcDeletesInMillis)).millis();
if (gcDeletesInMillis != RobinEngine.this.gcDeletesInMillis) {
logger.info("updating index.gc_deletes from [{}] to [{}]", TimeValue.timeValueMillis(RobinEngine.this.gcDeletesInMillis), TimeValue.timeValueMillis(gcDeletesInMillis));
RobinEngine.this.gcDeletesInMillis = gcDeletesInMillis;
}
int termIndexInterval = settings.getAsInt("index.term_index_interval", RobinEngine.this.termIndexInterval);
int termIndexDivisor = settings.getAsInt("index.term_index_divisor", RobinEngine.this.termIndexDivisor); // IndexReader#DEFAULT_TERMS_INDEX_DIVISOR
int indexConcurrency = settings.getAsInt("index.index_concurrency", RobinEngine.this.indexConcurrency);
boolean requiresFlushing = false;
if (termIndexInterval != RobinEngine.this.termIndexInterval || termIndexDivisor != RobinEngine.this.termIndexDivisor) {
rwl.readLock().lock();
try {
if (termIndexInterval != RobinEngine.this.termIndexInterval) {
logger.info("updating index.term_index_interval from [{}] to [{}]", RobinEngine.this.termIndexInterval, termIndexInterval);
RobinEngine.this.termIndexInterval = termIndexInterval;
indexWriter.getConfig().setTermIndexInterval(termIndexInterval);
}
if (termIndexDivisor != RobinEngine.this.termIndexDivisor) {
logger.info("updating index.term_index_divisor from [{}] to [{}]", RobinEngine.this.termIndexDivisor, termIndexDivisor);
RobinEngine.this.termIndexDivisor = termIndexDivisor;
indexWriter.getConfig().setReaderTermsIndexDivisor(termIndexDivisor);
// we want to apply this right now for readers, even "current" ones
requiresFlushing = true;
}
if (indexConcurrency != RobinEngine.this.indexConcurrency) {
logger.info("updating index.index_concurrency from [{}] to [{}]", RobinEngine.this.indexConcurrency, indexConcurrency);
RobinEngine.this.indexConcurrency = indexConcurrency;
// we have to flush in this case, since it only applies on a new index writer
requiresFlushing = true;
}
} finally {
rwl.readLock().unlock();
}
if (requiresFlushing) {
flush(new Flush().full(true));
}
}
}
}
private AcquirableResource<ReaderSearcherHolder> buildNrtResource(IndexWriter indexWriter) throws IOException {
IndexReader indexReader = IndexReader.open(indexWriter, true);
ExtendedIndexSearcher indexSearcher = new ExtendedIndexSearcher(indexReader);
indexSearcher.setSimilarity(similarityService.defaultSearchSimilarity());
return newAcquirableResource(new ReaderSearcherHolder(indexSearcher));
}
private static class RobinSearchResult implements Searcher {
private final AcquirableResource<ReaderSearcherHolder> nrtHolder;
private RobinSearchResult(AcquirableResource<ReaderSearcherHolder> nrtHolder) {
this.nrtHolder = nrtHolder;
}
@Override public IndexReader reader() {
return nrtHolder.resource().reader();
}
@Override public ExtendedIndexSearcher searcher() {
return nrtHolder.resource().searcher();
}
@Override public boolean release() throws ElasticSearchException {
nrtHolder.release();
return true;
}
}
static class VersionValue {
private final long version;
private final boolean delete;
private final long time;
private final Translog.Location translogLocation;
VersionValue(long version, boolean delete, long time, Translog.Location translogLocation) {
this.version = version;
this.delete = delete;
this.time = time;
this.translogLocation = translogLocation;
}
public long time() {
return this.time;
}
public long version() {
return version;
}
public boolean delete() {
return delete;
}
public Translog.Location translogLocation() {
return this.translogLocation;
}
}
}
|
diff --git a/src/mc/alk/arena/objects/arenas/Persistable.java b/src/mc/alk/arena/objects/arenas/Persistable.java
index e4eecec..60106e0 100644
--- a/src/mc/alk/arena/objects/arenas/Persistable.java
+++ b/src/mc/alk/arena/objects/arenas/Persistable.java
@@ -1,332 +1,332 @@
package mc.alk.arena.objects.arenas;
import java.lang.annotation.Annotation;
import java.lang.reflect.Constructor;
import java.lang.reflect.Field;
import java.lang.reflect.ParameterizedType;
import java.lang.reflect.Type;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import mc.alk.arena.objects.YamlSerializable;
import mc.alk.arena.serializers.Persist;
import mc.alk.arena.util.InventoryUtil;
import mc.alk.arena.util.SerializerUtil;
import org.bukkit.Location;
import org.bukkit.configuration.ConfigurationSection;
import org.bukkit.inventory.ItemStack;
public class Persistable {
public static class NotPersistableException extends Exception{
private static final long serialVersionUID = 1L;
public NotPersistableException(String msg){
super(msg);
}
}
public static void yamlToObjects(Arena arena, ConfigurationSection cs){
if (cs == null)
return;
yamlToObjects(arena, arena.getClass(),cs);
}
private static void yamlToObjects(Arena arena, Class<?> arenaClass, ConfigurationSection cs){
for(Field field : arenaClass.getDeclaredFields()){
Class<?> type = field.getType();
String name = field.getName();
Annotation[] annotations = field.getDeclaredAnnotations();
for (Annotation a : annotations){
if (!(a instanceof Persist || !cs.contains(name))){
continue;
}
// System.out.println("Type = " + type +" " + name +" " + annotations + " " + cs.getString(name));
field.setAccessible(true);
try {
Object obj = null;
if (type == int.class){
field.setInt(arena, cs.getInt(name));
} else if (type == float.class){
field.setFloat(arena, (float)cs.getDouble(name));
} else if (type == double.class){
field.setDouble(arena, cs.getDouble(name));
} else if (type == long.class){
field.setLong(arena, cs.getLong(name));
} else if (type == boolean.class){
field.setBoolean(arena, cs.getBoolean(name));
} else if (type == short.class){
field.setShort(arena, (short)cs.getInt(name));
} else if (type == byte.class){
field.setByte(arena, (byte)cs.getInt(name));
} else if (type == char.class){
String str = cs.getString(name);
if (str != null && !str.isEmpty())
field.setChar(arena, str.charAt(0));
} else if (type == Integer.class){
obj = cs.getInt(name);
} else if (type == Float.class){
Double d= cs.getDouble(name);
if (d != null)
obj = new Float(d);
} else if (type == Double.class){
obj = cs.getDouble(name);
} else if (type == Character.class){
String str = cs.getString(name);
if (str != null && !str.isEmpty())
obj = str.charAt(0);
} else if (type == Byte.class){
Integer i= cs.getInt(name);
if (i != null)
obj = new Byte(i.byteValue());
} else if (type == Short.class){
Integer i= cs.getInt(name);
if (i != null)
obj = new Short(i.shortValue());
} else if (type == Long.class){
obj = cs.getLong(name);
} else if (type == Boolean.class){
obj = new Boolean( cs.getBoolean(name));
} else if (type == String.class){
obj = cs.getString(name);
} else if (type == Location.class){
String locstr = cs.getString(name);
obj = SerializerUtil.getLocation(locstr);
} else if (type == ItemStack.class){
String str = cs.getString(name);
if (str != null)
obj = InventoryUtil.parseItem(str);
} else if (List.class.isAssignableFrom(type)){
ParameterizedType pt = (ParameterizedType) field.getGenericType();
List<?> list = cs.getList(name);
if (list == null)
continue;
Type genType = pt.getActualTypeArguments()[0];
List<Object> newList = new ArrayList<Object>();
for (Object o : list){
newList.add(yamlToObj((String)o,genType, cs));
}
obj = newList;
} else if (Set.class.isAssignableFrom(type)){
ParameterizedType pt = (ParameterizedType) field.getGenericType();
List<?> list = cs.getList(name);
if (list == null)
continue;
Type genType = pt.getActualTypeArguments()[0];
Set<Object> newSet = new HashSet<Object>();
for (Object o : list){
newSet.add(yamlToObj((String)o,genType,cs));
}
obj = newSet;
} else if (Map.class.isAssignableFrom(type)){
ParameterizedType pt = (ParameterizedType) field.getGenericType();
ConfigurationSection mapcs = cs.getConfigurationSection(name);
if (mapcs == null)
continue;
Set<String> keyset = mapcs.getKeys(false);
Type keyType = pt.getActualTypeArguments()[0];
Type mapType = pt.getActualTypeArguments()[1];
Map<Object,Object> newMap = new HashMap<Object,Object>();
for (String key : keyset){
Object k = yamlToObj(key,keyType,cs);
- Object v = yamlToObj((String)mapcs.get(key), mapType,cs);
+ Object v = yamlToObj(mapcs.get(key).toString(), mapType,cs);
if (k != null && v != null)
newMap.put(k,v);
}
obj = newMap;
} else if (YamlSerializable.class.isAssignableFrom(type)){
obj = createYamlSerializable(type,cs.getConfigurationSection(name));
} else {
obj = yamlToObj(name,type,cs);
}
if (obj != null)
field.set(arena, obj);
} catch (NotPersistableException e) {
System.err.println(e.getMessage());
} catch (Exception e) {
e.printStackTrace();
}
}
}
Class<?> superClass = arenaClass.getSuperclass();
if (superClass != null && Arena.class.isAssignableFrom(superClass) ){
yamlToObjects(arena,superClass,cs);
}
}
private static Object yamlToObj(String name, Type type, ConfigurationSection cs) throws Exception {
if (type == Integer.class){
return new Integer(name);
} else if (type == Float.class){
return new Float(name);
} else if (type == Double.class){
return new Double(name);
} else if (type == Character.class){
return name.charAt(0);
} else if (type == Byte.class){
return new Byte(name);
} else if (type == Short.class){
return new Short(name);
} else if (type == Long.class){
return new Long(name);
} else if (type == Boolean.class){
return new Boolean(name);
} else if (type == String.class){
return name;
} else if (type == Location.class){
return SerializerUtil.getLocation(name);
} else if (type == ItemStack.class){
return InventoryUtil.parseItem(name);
}
throw new NotPersistableException("Type " + type +" is not persistable. Not loading values for "+name);
}
private static Object createYamlSerializable(Class<?> clazz, ConfigurationSection cs) {
if (clazz == null)
return null;
Class<?>[] args = {};
try {
Constructor<?> constructor = clazz.getConstructor(args);
YamlSerializable ys = (YamlSerializable) constructor.newInstance((Object[])args);
if (ys == null)
return null;
ys = (YamlSerializable) ys.yamlToObject(cs);
return ys;
} catch (NoSuchMethodException e){
System.err.println("If you have custom constructors for your YamlSerializable class you must also have a public default constructor");
System.err.println("Add the following line to your YamlSerializable Class '" + clazz.getSimpleName()+".java'");
System.err.println("public " + clazz.getSimpleName()+"(){}");
} catch (Exception e) {
e.printStackTrace();
}
return null;
}
public static Map<String, Object> objectsToYamlMap(Arena arena) {
Map<String,Object> map = new HashMap<String,Object>();
Class<?> arenaClass = arena.getClass();
objectsToYamlMap(arena,arenaClass,map);
return map;
}
private static void objectsToYamlMap(Arena arena, Class<?> arenaClass, Map<String,Object> map){
for(Field field : arenaClass.getDeclaredFields()){
Class<?> type = field.getType();
String name = field.getName();
Annotation[] annotations = field.getDeclaredAnnotations();
for (Annotation a : annotations){
if (!(a instanceof Persist)){
continue;
}
// System.out.println("Type = " + type +" " + name +" " + annotations);
field.setAccessible(true);
try {
Object obj = null;
if (type == Integer.class || type == Float.class || type == Double.class ||
type == Byte.class || type == Boolean.class || type == Character.class ||
type == Short.class || type == Long.class || type==String.class){
obj = field.get(arena);
} else if (type == int.class){
map.put(name, field.getInt(arena));
} else if (type == float.class){
map.put(name, field.getFloat(arena));
} else if (type == double.class){
map.put(name, field.getDouble(arena));
} else if (type == byte.class){
map.put(name, field.getByte(arena));
} else if (type == boolean.class){
map.put(name, field.getBoolean(arena));
} else if (type == char.class){
map.put(name, field.getChar(arena));
} else if (type == short.class){
map.put(name, field.getShort(arena));
} else if (type == long.class){
map.put(name, field.getLong(arena));
} else if (type == Location.class || type == ItemStack.class){
obj = objToYaml(field.get(arena));
} else if (List.class.isAssignableFrom(type)){
@SuppressWarnings("unchecked")
List<Object> list = (List<Object>) field.get(arena);
if (list == null)
continue;
List<Object> olist = new ArrayList<Object>();
for (Object o : list){
olist.add(objToYaml(o));
}
obj = olist;
} else if (Set.class.isAssignableFrom(type)){
/// Just convert to a list, then we can put it back into set form later when we deserialize
@SuppressWarnings("unchecked")
Set<Object> set = (Set<Object>) field.get(arena);
if (set == null)
continue;
List<Object> oset = new ArrayList<Object>();
for (Object o : set){
oset.add(objToYaml(o));
}
obj = oset;
} else if (Map.class.isAssignableFrom(type)){
@SuppressWarnings("unchecked")
Map<Object,Object> mymap = (HashMap<Object,Object>) field.get(arena);
if (mymap == null)
continue;
Map<Object,Object> oset = new HashMap<Object,Object>();
for (Object o : mymap.keySet()){
Object key = objToYaml(o);
if (key == null)
continue;
Object value = mymap.get(o);
if (value == null)
continue;
oset.put(key.toString(),objToYaml(value));
}
obj = oset;
} else if (YamlSerializable.class.isAssignableFrom(type)){
YamlSerializable ys = (YamlSerializable) field.get(arena);
if (ys != null)
obj = ys.objectToYaml();
} else {
throw new NotPersistableException("Type " + type +" is not persistable. Not saving value for " + name);
}
if (obj == null)
continue;
map.put(name, obj);
} catch (NotPersistableException e) {
System.err.println(e.getMessage());
} catch (Exception e) {
e.printStackTrace();
}
}
}
Class<?> superClass = arenaClass.getSuperclass();
if (superClass != null && Arena.class.isAssignableFrom(superClass) ){
objectsToYamlMap(arena,superClass,map);
}
}
private static Object objToYaml(Object obj) {
if (obj == null)
return null;
if (obj instanceof Location){
return SerializerUtil.getLocString((Location)obj);
} else if (obj instanceof ItemStack){
return InventoryUtil.getItemString((ItemStack)obj);
} else if (obj instanceof YamlSerializable){
return ((YamlSerializable)obj).objectToYaml();
}
return obj;
}
}
| true | true | private static void yamlToObjects(Arena arena, Class<?> arenaClass, ConfigurationSection cs){
for(Field field : arenaClass.getDeclaredFields()){
Class<?> type = field.getType();
String name = field.getName();
Annotation[] annotations = field.getDeclaredAnnotations();
for (Annotation a : annotations){
if (!(a instanceof Persist || !cs.contains(name))){
continue;
}
// System.out.println("Type = " + type +" " + name +" " + annotations + " " + cs.getString(name));
field.setAccessible(true);
try {
Object obj = null;
if (type == int.class){
field.setInt(arena, cs.getInt(name));
} else if (type == float.class){
field.setFloat(arena, (float)cs.getDouble(name));
} else if (type == double.class){
field.setDouble(arena, cs.getDouble(name));
} else if (type == long.class){
field.setLong(arena, cs.getLong(name));
} else if (type == boolean.class){
field.setBoolean(arena, cs.getBoolean(name));
} else if (type == short.class){
field.setShort(arena, (short)cs.getInt(name));
} else if (type == byte.class){
field.setByte(arena, (byte)cs.getInt(name));
} else if (type == char.class){
String str = cs.getString(name);
if (str != null && !str.isEmpty())
field.setChar(arena, str.charAt(0));
} else if (type == Integer.class){
obj = cs.getInt(name);
} else if (type == Float.class){
Double d= cs.getDouble(name);
if (d != null)
obj = new Float(d);
} else if (type == Double.class){
obj = cs.getDouble(name);
} else if (type == Character.class){
String str = cs.getString(name);
if (str != null && !str.isEmpty())
obj = str.charAt(0);
} else if (type == Byte.class){
Integer i= cs.getInt(name);
if (i != null)
obj = new Byte(i.byteValue());
} else if (type == Short.class){
Integer i= cs.getInt(name);
if (i != null)
obj = new Short(i.shortValue());
} else if (type == Long.class){
obj = cs.getLong(name);
} else if (type == Boolean.class){
obj = new Boolean( cs.getBoolean(name));
} else if (type == String.class){
obj = cs.getString(name);
} else if (type == Location.class){
String locstr = cs.getString(name);
obj = SerializerUtil.getLocation(locstr);
} else if (type == ItemStack.class){
String str = cs.getString(name);
if (str != null)
obj = InventoryUtil.parseItem(str);
} else if (List.class.isAssignableFrom(type)){
ParameterizedType pt = (ParameterizedType) field.getGenericType();
List<?> list = cs.getList(name);
if (list == null)
continue;
Type genType = pt.getActualTypeArguments()[0];
List<Object> newList = new ArrayList<Object>();
for (Object o : list){
newList.add(yamlToObj((String)o,genType, cs));
}
obj = newList;
} else if (Set.class.isAssignableFrom(type)){
ParameterizedType pt = (ParameterizedType) field.getGenericType();
List<?> list = cs.getList(name);
if (list == null)
continue;
Type genType = pt.getActualTypeArguments()[0];
Set<Object> newSet = new HashSet<Object>();
for (Object o : list){
newSet.add(yamlToObj((String)o,genType,cs));
}
obj = newSet;
} else if (Map.class.isAssignableFrom(type)){
ParameterizedType pt = (ParameterizedType) field.getGenericType();
ConfigurationSection mapcs = cs.getConfigurationSection(name);
if (mapcs == null)
continue;
Set<String> keyset = mapcs.getKeys(false);
Type keyType = pt.getActualTypeArguments()[0];
Type mapType = pt.getActualTypeArguments()[1];
Map<Object,Object> newMap = new HashMap<Object,Object>();
for (String key : keyset){
Object k = yamlToObj(key,keyType,cs);
Object v = yamlToObj((String)mapcs.get(key), mapType,cs);
if (k != null && v != null)
newMap.put(k,v);
}
obj = newMap;
} else if (YamlSerializable.class.isAssignableFrom(type)){
obj = createYamlSerializable(type,cs.getConfigurationSection(name));
} else {
obj = yamlToObj(name,type,cs);
}
if (obj != null)
field.set(arena, obj);
} catch (NotPersistableException e) {
System.err.println(e.getMessage());
} catch (Exception e) {
e.printStackTrace();
}
}
}
Class<?> superClass = arenaClass.getSuperclass();
if (superClass != null && Arena.class.isAssignableFrom(superClass) ){
yamlToObjects(arena,superClass,cs);
}
}
| private static void yamlToObjects(Arena arena, Class<?> arenaClass, ConfigurationSection cs){
for(Field field : arenaClass.getDeclaredFields()){
Class<?> type = field.getType();
String name = field.getName();
Annotation[] annotations = field.getDeclaredAnnotations();
for (Annotation a : annotations){
if (!(a instanceof Persist || !cs.contains(name))){
continue;
}
// System.out.println("Type = " + type +" " + name +" " + annotations + " " + cs.getString(name));
field.setAccessible(true);
try {
Object obj = null;
if (type == int.class){
field.setInt(arena, cs.getInt(name));
} else if (type == float.class){
field.setFloat(arena, (float)cs.getDouble(name));
} else if (type == double.class){
field.setDouble(arena, cs.getDouble(name));
} else if (type == long.class){
field.setLong(arena, cs.getLong(name));
} else if (type == boolean.class){
field.setBoolean(arena, cs.getBoolean(name));
} else if (type == short.class){
field.setShort(arena, (short)cs.getInt(name));
} else if (type == byte.class){
field.setByte(arena, (byte)cs.getInt(name));
} else if (type == char.class){
String str = cs.getString(name);
if (str != null && !str.isEmpty())
field.setChar(arena, str.charAt(0));
} else if (type == Integer.class){
obj = cs.getInt(name);
} else if (type == Float.class){
Double d= cs.getDouble(name);
if (d != null)
obj = new Float(d);
} else if (type == Double.class){
obj = cs.getDouble(name);
} else if (type == Character.class){
String str = cs.getString(name);
if (str != null && !str.isEmpty())
obj = str.charAt(0);
} else if (type == Byte.class){
Integer i= cs.getInt(name);
if (i != null)
obj = new Byte(i.byteValue());
} else if (type == Short.class){
Integer i= cs.getInt(name);
if (i != null)
obj = new Short(i.shortValue());
} else if (type == Long.class){
obj = cs.getLong(name);
} else if (type == Boolean.class){
obj = new Boolean( cs.getBoolean(name));
} else if (type == String.class){
obj = cs.getString(name);
} else if (type == Location.class){
String locstr = cs.getString(name);
obj = SerializerUtil.getLocation(locstr);
} else if (type == ItemStack.class){
String str = cs.getString(name);
if (str != null)
obj = InventoryUtil.parseItem(str);
} else if (List.class.isAssignableFrom(type)){
ParameterizedType pt = (ParameterizedType) field.getGenericType();
List<?> list = cs.getList(name);
if (list == null)
continue;
Type genType = pt.getActualTypeArguments()[0];
List<Object> newList = new ArrayList<Object>();
for (Object o : list){
newList.add(yamlToObj((String)o,genType, cs));
}
obj = newList;
} else if (Set.class.isAssignableFrom(type)){
ParameterizedType pt = (ParameterizedType) field.getGenericType();
List<?> list = cs.getList(name);
if (list == null)
continue;
Type genType = pt.getActualTypeArguments()[0];
Set<Object> newSet = new HashSet<Object>();
for (Object o : list){
newSet.add(yamlToObj((String)o,genType,cs));
}
obj = newSet;
} else if (Map.class.isAssignableFrom(type)){
ParameterizedType pt = (ParameterizedType) field.getGenericType();
ConfigurationSection mapcs = cs.getConfigurationSection(name);
if (mapcs == null)
continue;
Set<String> keyset = mapcs.getKeys(false);
Type keyType = pt.getActualTypeArguments()[0];
Type mapType = pt.getActualTypeArguments()[1];
Map<Object,Object> newMap = new HashMap<Object,Object>();
for (String key : keyset){
Object k = yamlToObj(key,keyType,cs);
Object v = yamlToObj(mapcs.get(key).toString(), mapType,cs);
if (k != null && v != null)
newMap.put(k,v);
}
obj = newMap;
} else if (YamlSerializable.class.isAssignableFrom(type)){
obj = createYamlSerializable(type,cs.getConfigurationSection(name));
} else {
obj = yamlToObj(name,type,cs);
}
if (obj != null)
field.set(arena, obj);
} catch (NotPersistableException e) {
System.err.println(e.getMessage());
} catch (Exception e) {
e.printStackTrace();
}
}
}
Class<?> superClass = arenaClass.getSuperclass();
if (superClass != null && Arena.class.isAssignableFrom(superClass) ){
yamlToObjects(arena,superClass,cs);
}
}
|
diff --git a/org.eclipse.mylyn.wikitext.core/src/org/eclipse/mylyn/wikitext/core/parser/outline/OutlineItem.java b/org.eclipse.mylyn.wikitext.core/src/org/eclipse/mylyn/wikitext/core/parser/outline/OutlineItem.java
index 74c4d071..888faaa2 100644
--- a/org.eclipse.mylyn.wikitext.core/src/org/eclipse/mylyn/wikitext/core/parser/outline/OutlineItem.java
+++ b/org.eclipse.mylyn.wikitext.core/src/org/eclipse/mylyn/wikitext/core/parser/outline/OutlineItem.java
@@ -1,369 +1,369 @@
/*******************************************************************************
* Copyright (c) 2007, 2009 David Green and others.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* David Green - initial API and implementation
*******************************************************************************/
package org.eclipse.mylyn.wikitext.core.parser.outline;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
* An item in a document outline. A document outline reflects the heading structure of the document. Generally there is
* always a root item that represents the document itself. Every level-1 heading becomes a child item of the root.
*
* @author David Green
* @since 1.0
*/
public class OutlineItem {
private OutlineItem parent;
private final int level;
private List<OutlineItem> children = new ArrayList<OutlineItem>();
private final int offset;
private int length;
private final String id;
private String label;
private String kind;
private int childOffset;
private String tooltip;
private Map<String, OutlineItem> itemsById;
private String resourcePath;
public OutlineItem(OutlineItem parent, int level, String id, int offset, int length, String label) {
super();
this.parent = parent;
this.level = (parent == null) ? 0 : level;
if (parent != null && level < parent.getLevel()) {
throw new IllegalArgumentException();
}
this.id = id;
this.offset = offset;
this.length = length;
this.label = label;
if (parent != null) {
parent.addChild(this);
}
}
/**
* get the length of the outline item, which corresponds to the length of the heading text. The length does not
* include content following the heading text itself.
*
* @see #getSectionLength()
*/
public int getLength() {
return length;
}
/**
* get the length of the section, which is the length of the heading text plus the length of any following content
* up to the next peer-leveled heading or the parent's following sibling.
*
* @see #getLength()
*/
public int getSectionLength() {
if (parent == null) {
return length;
}
List<OutlineItem> siblings = getParent().getChildren();
int index = siblings.indexOf(this);
if (index < (siblings.size() - 1)) {
return siblings.get(index + 1).getOffset() - getOffset();
}
int parentRelativeOffset = getOffset() - parent.getOffset();
return parent.getSectionLength() - parentRelativeOffset;
}
public String getKind() {
return kind;
}
public void setKind(String kind) {
this.kind = kind;
}
/**
* the text of the heading which could be truncated
*/
public String getLabel() {
return label;
}
/**
* the id of the heading, which is typically (though not guaranteed to be) unique within a document. Heading ids may
* be used as the target of document-relative anchors
*/
public String getId() {
return id;
}
/**
* the level of the document which is positive and usually <= 6 except for the root item where the value is
* undefined.
*/
public int getLevel() {
if (parent == null) {
return 0;
}
return level;
}
void setLength(int length) {
this.length = length;
}
public void setLabel(String label) {
this.label = label;
}
public OutlineItem getParent() {
return parent;
}
/**
* indicate if this is the root item (that is, the item representing the whole document)
*/
public boolean isRootItem() {
return parent == null;
}
/**
* Get the previous item. The order of the items is determined via document order traversal of all nodes in the
* outline.
*
* @return the previous item or null if there is no previous (ie: the root item).
*/
public OutlineItem getPrevious() {
if (parent == null) {
return null;
}
List<OutlineItem> siblings = parent.getChildren();
int index = siblings.indexOf(this);
if (index > 0) {
return siblings.get(index - 1);
}
return parent;
}
public List<OutlineItem> getChildren() {
return children;
}
public int getOffset() {
return offset;
}
@Override
public int hashCode() {
return calculatePositionKey().hashCode();
}
@Override
public boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (obj == null) {
return false;
}
if (getClass() != obj.getClass()) {
return false;
}
final OutlineItem other = (OutlineItem) obj;
return other.calculatePositionKey().equals(calculatePositionKey());
}
public void clear() {
children.clear();
}
private String calculatePositionKey() {
if (parent == null) {
return ""; //$NON-NLS-1$
}
return getParent().calculatePositionKey() + "/" + kind + childOffset; //$NON-NLS-1$
}
private void addChild(OutlineItem outlineItem) {
outlineItem.childOffset = children.size();
children.add(outlineItem);
}
public OutlineItem findNearestMatchingOffset(int offset) {
NearestItemVisitor visitor = new NearestItemVisitor(offset);
accept(visitor);
return visitor.nearest;
}
public OutlineItem findItemById(String id) {
if (itemsById == null) {
itemsById = new HashMap<String, OutlineItem>();
accept(new Visitor() {
public boolean visit(OutlineItem item) {
if (item.getId() != null) {
itemsById.put(item.getId(), item);
}
return true;
}
});
}
return itemsById.get(id);
}
private static class NearestItemVisitor implements Visitor {
private OutlineItem nearest = null;
private final int offset;
public NearestItemVisitor(int offset) {
this.offset = offset;
}
public boolean visit(OutlineItem item) {
- if (item.getOffset() == -1) {
+ if (item.isRootItem()) {
return true;
}
if (nearest == null) {
nearest = item;
return true;
}
int itemDistance = item.distance(offset);
if (itemDistance > 0) {
return true;
}
int nearestDistance = nearest.distance(offset);
nearestDistance = Math.abs(nearestDistance);
itemDistance = Math.abs(itemDistance);
if (itemDistance < nearestDistance) {
nearest = item;
} else if (itemDistance > nearestDistance) {
return false;
}
return true;
}
}
public int distance(int offset) {
int startDistance = this.offset - offset;
return startDistance;
}
public interface Visitor {
/**
* @param item
* the item to visit
*
* @return true if the items children should be visited
*/
public boolean visit(OutlineItem item);
}
public void accept(Visitor visitor) {
if (visitor.visit(this)) {
for (OutlineItem item : getChildren()) {
item.accept(visitor);
}
}
}
public void setTooltip(String tooltip) {
this.tooltip = tooltip;
}
public String getTooltip() {
return tooltip;
}
/**
* the resource path to the resource of this outline item
*
* @return the resource path, or null if it's unknown.
*/
public String getResourcePath() {
if (getParent() != null) {
return getParent().getResourcePath();
}
return resourcePath;
}
/**
* the resource path to the resource of this outline item
*
* @param resourcePath
* the resource path, or null if it's unknown.
*/
public void setResourcePath(String resourcePath) {
if (getParent() != null) {
getParent().setResourcePath(resourcePath);
} else {
this.resourcePath = resourcePath;
}
}
/**
* move children from the given outline item to this
*/
public void moveChildren(OutlineItem otherParent) {
if (!otherParent.children.isEmpty()) {
if (children.isEmpty()) {
List<OutlineItem> temp = children;
children = otherParent.children;
otherParent.children = temp;
for (OutlineItem child : children) {
child.parent = this;
}
} else {
children.addAll(otherParent.children);
for (OutlineItem child : otherParent.children) {
child.parent = this;
}
otherParent.children.clear();
}
}
itemsById = null;
setLength(otherParent.getLength());
}
/**
* Indicate if this outline item contains the given outline item. The computation uses outline item offsets (the
* {@link #getOffset() offset} and {@link #getSectionLength() section length}.
*
* @return true if and only if the offsets of the provided item lie within the offsets of this outline item.
*/
public boolean contains(OutlineItem item) {
if (item == this || isRootItem()) {
return true;
}
if (getOffset() <= item.getOffset()) {
int end = getOffset() + getSectionLength();
int itemEnd = item.getOffset() + item.getSectionLength();
if (end >= itemEnd) {
return true;
}
}
return false;
}
}
| true | true | public boolean visit(OutlineItem item) {
if (item.getOffset() == -1) {
return true;
}
if (nearest == null) {
nearest = item;
return true;
}
int itemDistance = item.distance(offset);
if (itemDistance > 0) {
return true;
}
int nearestDistance = nearest.distance(offset);
nearestDistance = Math.abs(nearestDistance);
itemDistance = Math.abs(itemDistance);
if (itemDistance < nearestDistance) {
nearest = item;
} else if (itemDistance > nearestDistance) {
return false;
}
return true;
}
| public boolean visit(OutlineItem item) {
if (item.isRootItem()) {
return true;
}
if (nearest == null) {
nearest = item;
return true;
}
int itemDistance = item.distance(offset);
if (itemDistance > 0) {
return true;
}
int nearestDistance = nearest.distance(offset);
nearestDistance = Math.abs(nearestDistance);
itemDistance = Math.abs(itemDistance);
if (itemDistance < nearestDistance) {
nearest = item;
} else if (itemDistance > nearestDistance) {
return false;
}
return true;
}
|
diff --git a/weaver/src/org/aspectj/weaver/ResolvedType.java b/weaver/src/org/aspectj/weaver/ResolvedType.java
index 1d4b40188..c062c22c9 100644
--- a/weaver/src/org/aspectj/weaver/ResolvedType.java
+++ b/weaver/src/org/aspectj/weaver/ResolvedType.java
@@ -1,1700 +1,1703 @@
/* *******************************************************************
* Copyright (c) 2002 Palo Alto Research Center, Incorporated (PARC).
* All rights reserved.
* This program and the accompanying materials are made available
* under the terms of the Common Public License v1.0
* which accompanies this distribution and is available at
* http://www.eclipse.org/legal/cpl-v10.html
*
* Contributors:
* PARC initial implementation
* ******************************************************************/
package org.aspectj.weaver;
import java.lang.reflect.Modifier;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.aspectj.bridge.IMessage;
import org.aspectj.bridge.ISourceLocation;
import org.aspectj.bridge.Message;
import org.aspectj.bridge.MessageUtil;
import org.aspectj.util.FuzzyBoolean;
import org.aspectj.weaver.patterns.Declare;
import org.aspectj.weaver.patterns.PerClause;
public abstract class ResolvedType extends UnresolvedType implements AnnotatedElement {
private static final ResolvedType[] EMPTY_RESOLVED_TYPE_ARRAY = new ResolvedType[0];
public static final String PARAMETERIZED_TYPE_IDENTIFIER = "P";
private ResolvedType[] resolvedTypeParams;
protected World world;
protected ResolvedType(String signature, World world) {
super(signature);
this.world = world;
}
protected ResolvedType(String signature, String signatureErasure, World world) {
super(signature,signatureErasure);
this.world = world;
}
// ---- things that don't require a world
/**
* Returns an iterator through ResolvedType objects representing all the direct
* supertypes of this type. That is, through the superclass, if any, and
* all declared interfaces.
*/
public final Iterator getDirectSupertypes() {
Iterator ifacesIterator = Iterators.array(getDeclaredInterfaces());
ResolvedType superclass = getSuperclass();
if (superclass == null) {
return ifacesIterator;
} else {
return Iterators.snoc(ifacesIterator, superclass);
}
}
public abstract ResolvedMember[] getDeclaredFields();
public abstract ResolvedMember[] getDeclaredMethods();
public abstract ResolvedType[] getDeclaredInterfaces();
public abstract ResolvedMember[] getDeclaredPointcuts();
/**
* Returns a ResolvedType object representing the superclass of this type, or null.
* If this represents a java.lang.Object, a primitive type, or void, this
* method returns null.
*/
public abstract ResolvedType getSuperclass();
/**
* Returns the modifiers for this type.
*
* See {@link java.lang.Class#getModifiers()} for a description
* of the weirdness of this methods on primitives and arrays.
*
* @param world the {@link World} in which the lookup is made.
* @return an int representing the modifiers for this type
* @see java.lang.reflect.Modifier
*/
public abstract int getModifiers();
public ResolvedType[] getAnnotationTypes() {
return EMPTY_RESOLVED_TYPE_ARRAY;
}
public final UnresolvedType getSuperclass(World world) {
return getSuperclass();
}
// This set contains pairs of types whose signatures are concatenated
// together, this means with a fast lookup we can tell if two types
// are equivalent.
static Set validBoxing = new HashSet();
static {
validBoxing.add("Ljava/lang/Byte;B");
validBoxing.add("Ljava/lang/Character;C");
validBoxing.add("Ljava/lang/Double;D");
validBoxing.add("Ljava/lang/Float;F");
validBoxing.add("Ljava/lang/Integer;I");
validBoxing.add("Ljava/lang/Long;J");
validBoxing.add("Ljava/lang/Short;S");
validBoxing.add("Ljava/lang/Boolean;Z");
validBoxing.add("BLjava/lang/Byte;");
validBoxing.add("CLjava/lang/Character;");
validBoxing.add("DLjava/lang/Double;");
validBoxing.add("FLjava/lang/Float;");
validBoxing.add("ILjava/lang/Integer;");
validBoxing.add("JLjava/lang/Long;");
validBoxing.add("SLjava/lang/Short;");
validBoxing.add("ZLjava/lang/Boolean;");
}
// utilities
public ResolvedType getResolvedComponentType() {
return null;
}
public World getWorld() {
return world;
}
// ---- things from object
public final boolean equals(Object other) {
if (other instanceof ResolvedType) {
return this == other;
} else {
return super.equals(other);
}
}
// ---- difficult things
/**
* returns an iterator through all of the fields of this type, in order
* for checking from JVM spec 2ed 5.4.3.2. This means that the order is
*
* <ul><li> fields from current class </li>
* <li> recur into direct superinterfaces </li>
* <li> recur into superclass </li>
* </ul>
*
* We keep a hashSet of interfaces that we've visited so we don't spiral
* out into 2^n land.
*/
public Iterator getFields() {
final Iterators.Filter dupFilter = Iterators.dupFilter();
Iterators.Getter typeGetter = new Iterators.Getter() {
public Iterator get(Object o) {
return
dupFilter.filter(
((ResolvedType)o).getDirectSupertypes());
}
};
Iterators.Getter fieldGetter = new Iterators.Getter() {
public Iterator get(Object o) {
return Iterators.array(((ResolvedType)o).getDeclaredFields());
}
};
return
Iterators.mapOver(
Iterators.recur(this, typeGetter),
fieldGetter);
}
/**
* returns an iterator through all of the methods of this type, in order
* for checking from JVM spec 2ed 5.4.3.3. This means that the order is
*
* <ul><li> methods from current class </li>
* <li> recur into superclass, all the way up, not touching interfaces </li>
* <li> recur into all superinterfaces, in some unspecified order </li>
* </ul>
*
* We keep a hashSet of interfaces that we've visited so we don't spiral
* out into 2^n land.
* NOTE: Take a look at the javadoc on getMethodsWithoutIterator() to see if
* you are sensitive to a quirk in getMethods()
*/
public Iterator getMethods() {
final Iterators.Filter dupFilter = Iterators.dupFilter();
Iterators.Getter ifaceGetter = new Iterators.Getter() {
public Iterator get(Object o) {
return
dupFilter.filter(
Iterators.array(((ResolvedType)o).getDeclaredInterfaces())
);
}
};
Iterators.Getter methodGetter = new Iterators.Getter() {
public Iterator get(Object o) {
return Iterators.array(((ResolvedType)o).getDeclaredMethods());
}
};
return
Iterators.mapOver(
Iterators.append(
new Iterator() {
ResolvedType curr = ResolvedType.this;
public boolean hasNext() {
return curr != null;
}
public Object next() {
ResolvedType ret = curr;
curr = curr.getSuperclass();
return ret;
}
public void remove() {
throw new UnsupportedOperationException();
}
},
Iterators.recur(this, ifaceGetter)),
methodGetter);
}
/**
* Return a list of methods, first those declared on this class, then those declared on the superclass (recurse) and then those declared
* on the superinterfaces. The getMethods() call above doesn't quite work the same as it will (through the iterator) return methods
* declared on *this* class twice, once at the start and once at the end - I couldn't debug that problem, so created this alternative.
*/
public List getMethodsWithoutIterator(boolean includeITDs) {
List methods = new ArrayList();
Set knowninterfaces = new HashSet();
addAndRecurse(knowninterfaces,methods,this,includeITDs);
return methods;
}
private void addAndRecurse(Set knowninterfaces,List collector, ResolvedType rtx, boolean includeITDs) {
collector.addAll(Arrays.asList(rtx.getDeclaredMethods())); // Add the methods declared on this type
// now add all the inter-typed members too
if (includeITDs && rtx.interTypeMungers != null) {
for (Iterator i = interTypeMungers.iterator(); i.hasNext();) {
ConcreteTypeMunger tm = (ConcreteTypeMunger) i.next();
ResolvedMember rm = tm.getSignature();
if (rm != null) { // new parent type munger can have null signature...
collector.add(tm.getSignature());
}
}
}
if (!rtx.equals(ResolvedType.OBJECT)) addAndRecurse(knowninterfaces,collector,rtx.getSuperclass(),includeITDs); // Recurse if we aren't at the top
ResolvedType[] interfaces = rtx.getDeclaredInterfaces(); // Go through the interfaces on the way back down
for (int i = 0; i < interfaces.length; i++) {
ResolvedType iface = interfaces[i];
if (!knowninterfaces.contains(iface)) { // Dont do interfaces more than once
knowninterfaces.add(iface);
addAndRecurse(knowninterfaces,collector,iface,includeITDs);
}
}
}
public ResolvedType[] getResolvedTypeParameters() {
if (resolvedTypeParams == null) {
resolvedTypeParams = world.resolve(typeParameters);
}
return resolvedTypeParams;
}
/**
* described in JVM spec 2ed 5.4.3.2
*/
public ResolvedMember lookupField(Member m) {
return lookupMember(m, getFields());
}
/**
* described in JVM spec 2ed 5.4.3.3.
* Doesnt check ITDs.
*/
public ResolvedMember lookupMethod(Member m) {
return lookupMember(m, getMethods());
}
public ResolvedMember lookupMethodInITDs(Member m) {
if (interTypeMungers != null) {
for (Iterator i = interTypeMungers.iterator(); i.hasNext();) {
ConcreteTypeMunger tm = (ConcreteTypeMunger) i.next();
if (matches(tm.getSignature(), m)) {
return tm.getSignature();
}
}
}
return null;
}
/** return null if not found */
private ResolvedMember lookupMember(Member m, Iterator i) {
while (i.hasNext()) {
ResolvedMember f = (ResolvedMember) i.next();
if (matches(f, m)) return f;
}
return null; //ResolvedMember.Missing;
//throw new BCException("can't find " + m);
}
/** return null if not found */
private ResolvedMember lookupMember(Member m, ResolvedMember[] a) {
for (int i = 0; i < a.length; i++) {
ResolvedMember f = a[i];
if (matches(f, m)) return f;
}
return null;
}
/**
* Looks for the first member in the hierarchy matching aMember. This method
* differs from lookupMember(Member) in that it takes into account parameters
* which are type variables - which clearly an unresolved Member cannot do since
* it does not know anything about type variables.
*/
public ResolvedMember lookupResolvedMember(ResolvedMember aMember) {
Iterator toSearch = null;
ResolvedMember found = null;
if ((aMember.getKind() == Member.METHOD) || (aMember.getKind() == Member.CONSTRUCTOR)) {
toSearch = getMethodsWithoutIterator(true).iterator();
} else {
if (aMember.getKind() != Member.FIELD)
throw new IllegalStateException("I didn't know you would look for members of kind " + aMember.getKind());
toSearch = getFields();
}
while(toSearch.hasNext()) {
ResolvedMemberImpl candidate = (ResolvedMemberImpl) toSearch.next();
if (candidate.matches(aMember)) {
found = candidate;
break;
}
}
return found;
}
public static boolean matches(Member m1, Member m2) {
if (m1 == null) return m2 == null;
if (m2 == null) return false;
// Check the names
boolean equalNames = m1.getName().equals(m2.getName());
if (!equalNames) return false;
// Check the signatures
boolean equalSignatures = m1.getSignature().equals(m2.getSignature());
if (equalSignatures) return true;
// If they aren't the same, we need to allow for covariance ... where one sig might be ()LCar; and
// the subsig might be ()LFastCar; - where FastCar is a subclass of Car
boolean equalCovariantSignatures = m1.getParameterSignature().equals(m2.getParameterSignature());
if (equalCovariantSignatures) return true;
return false;
}
public static boolean conflictingSignature(Member m1, Member m2) {
if (m1 == null || m2 == null) return false;
if (!m1.getName().equals(m2.getName())) { return false; }
if (m1.getKind() != m2.getKind()) { return false; }
if (m1.getKind() == Member.FIELD) {
return m1.getDeclaringType().equals(m2.getDeclaringType());
} else if (m1.getKind() == Member.POINTCUT) {
return true;
}
UnresolvedType[] p1 = m1.getParameterTypes();
UnresolvedType[] p2 = m2.getParameterTypes();
int n = p1.length;
if (n != p2.length) return false;
for (int i=0; i < n; i++) {
if (!p1[i].equals(p2[i])) return false;
}
return true;
}
/**
* returns an iterator through all of the pointcuts of this type, in order
* for checking from JVM spec 2ed 5.4.3.2 (as for fields). This means that the order is
*
* <ul><li> pointcuts from current class </li>
* <li> recur into direct superinterfaces </li>
* <li> recur into superclass </li>
* </ul>
*
* We keep a hashSet of interfaces that we've visited so we don't spiral
* out into 2^n land.
*/
public Iterator getPointcuts() {
final Iterators.Filter dupFilter = Iterators.dupFilter();
// same order as fields
Iterators.Getter typeGetter = new Iterators.Getter() {
public Iterator get(Object o) {
return
dupFilter.filter(
((ResolvedType)o).getDirectSupertypes());
}
};
Iterators.Getter pointcutGetter = new Iterators.Getter() {
public Iterator get(Object o) {
//System.err.println("getting for " + o);
return Iterators.array(((ResolvedType)o).getDeclaredPointcuts());
}
};
return
Iterators.mapOver(
Iterators.recur(this, typeGetter),
pointcutGetter);
}
public ResolvedPointcutDefinition findPointcut(String name) {
//System.err.println("looking for pointcuts " + this);
for (Iterator i = getPointcuts(); i.hasNext(); ) {
ResolvedPointcutDefinition f = (ResolvedPointcutDefinition) i.next();
//System.err.println(f);
if (name.equals(f.getName())) {
return f;
}
}
return null; // should we throw an exception here?
}
// all about collecting CrosscuttingMembers
//??? collecting data-structure, shouldn't really be a field
public CrosscuttingMembers crosscuttingMembers;
public CrosscuttingMembers collectCrosscuttingMembers() {
crosscuttingMembers = new CrosscuttingMembers(this);
crosscuttingMembers.setPerClause(getPerClause());
crosscuttingMembers.addShadowMungers(collectShadowMungers());
crosscuttingMembers.addTypeMungers(getTypeMungers());
//FIXME AV - skip but needed ?? or ?? crosscuttingMembers.addLateTypeMungers(getLateTypeMungers());
crosscuttingMembers.addDeclares(collectDeclares(!this.doesNotExposeShadowMungers()));
crosscuttingMembers.addPrivilegedAccesses(getPrivilegedAccesses());
//System.err.println("collected cc members: " + this + ", " + collectDeclares());
return crosscuttingMembers;
}
public final Collection collectDeclares(boolean includeAdviceLike) {
if (! this.isAspect() ) return Collections.EMPTY_LIST;
ArrayList ret = new ArrayList();
//if (this.isAbstract()) {
// for (Iterator i = getDeclares().iterator(); i.hasNext();) {
// Declare dec = (Declare) i.next();
// if (!dec.isAdviceLike()) ret.add(dec);
// }
//
// if (!includeAdviceLike) return ret;
if (!this.isAbstract()) {
//ret.addAll(getDeclares());
final Iterators.Filter dupFilter = Iterators.dupFilter();
Iterators.Getter typeGetter = new Iterators.Getter() {
public Iterator get(Object o) {
return
dupFilter.filter(
((ResolvedType)o).getDirectSupertypes());
}
};
Iterator typeIterator = Iterators.recur(this, typeGetter);
while (typeIterator.hasNext()) {
ResolvedType ty = (ResolvedType) typeIterator.next();
//System.out.println("super: " + ty + ", " + );
for (Iterator i = ty.getDeclares().iterator(); i.hasNext();) {
Declare dec = (Declare) i.next();
if (dec.isAdviceLike()) {
if (includeAdviceLike) ret.add(dec);
} else {
ret.add(dec);
}
}
}
}
return ret;
}
private final Collection collectShadowMungers() {
if (! this.isAspect() || this.isAbstract() || this.doesNotExposeShadowMungers()) return Collections.EMPTY_LIST;
ArrayList acc = new ArrayList();
final Iterators.Filter dupFilter = Iterators.dupFilter();
Iterators.Getter typeGetter = new Iterators.Getter() {
public Iterator get(Object o) {
return
dupFilter.filter(
((ResolvedType)o).getDirectSupertypes());
}
};
Iterator typeIterator = Iterators.recur(this, typeGetter);
while (typeIterator.hasNext()) {
ResolvedType ty = (ResolvedType) typeIterator.next();
acc.addAll(ty.getDeclaredShadowMungers());
}
return acc;
}
protected boolean doesNotExposeShadowMungers() {
return false;
}
public PerClause getPerClause() { return null; }
protected Collection getDeclares() {
return Collections.EMPTY_LIST;
}
protected Collection getTypeMungers() { return Collections.EMPTY_LIST; }
protected Collection getPrivilegedAccesses() { return Collections.EMPTY_LIST; }
// ---- useful things
public final boolean isInterface() {
return Modifier.isInterface(getModifiers());
}
public final boolean isAbstract() {
return Modifier.isAbstract(getModifiers());
}
public boolean isClass() {
return false;
}
public boolean isAspect() {
return false;
}
public boolean isAnnotationStyleAspect() {
return false;
}
/**
* Note: Only overridden by Name subtype.
*/
public boolean isEnum() {
return false;
}
/**
* Note: Only overridden by Name subtype.
*/
public boolean isAnnotation() {
return false;
}
/**
* Note: Only overridden by Name subtype
*/
public void addAnnotation(AnnotationX annotationX) {
throw new RuntimeException("ResolvedType.addAnnotation() should never be called");
}
/**
* Note: Only overridden by Name subtype
*/
public AnnotationX[] getAnnotations() {
throw new RuntimeException("ResolvedType.getAnnotations() should never be called");
}
/**
* Note: Only overridden by Name subtype.
*/
public boolean isAnnotationWithRuntimeRetention() {
return false;
}
public boolean isSynthetic() {
return signature.indexOf("$ajc") != -1;
}
public final boolean isFinal() {
return Modifier.isFinal(getModifiers());
}
protected Map /*Type variable name -> UnresolvedType*/ getMemberParameterizationMap() {
if (!isParameterizedType()) return Collections.EMPTY_MAP;
TypeVariable[] tvs = getGenericType().getTypeVariables();
Map parameterizationMap = new HashMap();
for (int i = 0; i < tvs.length; i++) {
parameterizationMap.put(tvs[i].getName(), typeParameters[i]);
}
return parameterizationMap;
}
public Collection getDeclaredAdvice() {
List l = new ArrayList();
ResolvedMember[] methods = getDeclaredMethods();
if (isParameterizedType()) methods = getGenericType().getDeclaredMethods();
Map typeVariableMap = getMemberParameterizationMap();
for (int i=0, len = methods.length; i < len; i++) {
ShadowMunger munger = methods[i].getAssociatedShadowMunger();
if (munger != null) {
if (this.isParameterizedType()) {
munger.setPointcut(munger.getPointcut().parameterizeWith(typeVariableMap));
}
l.add(munger);
}
}
return l;
}
public Collection getDeclaredShadowMungers() {
Collection c = getDeclaredAdvice();
return c;
}
// ---- only for testing!
public ResolvedMember[] getDeclaredJavaFields() {
return filterInJavaVisible(getDeclaredFields());
}
public ResolvedMember[] getDeclaredJavaMethods() {
return filterInJavaVisible(getDeclaredMethods());
}
public ShadowMunger[] getDeclaredShadowMungersArray() {
List l = (List) getDeclaredShadowMungers();
return (ShadowMunger[]) l.toArray(new ShadowMunger[l.size()]);
}
private ResolvedMember[] filterInJavaVisible(ResolvedMember[] ms) {
List l = new ArrayList();
for (int i=0, len = ms.length; i < len; i++) {
if (! ms[i].isAjSynthetic() && ms[i].getAssociatedShadowMunger() == null) {
l.add(ms[i]);
}
}
return (ResolvedMember[]) l.toArray(new ResolvedMember[l.size()]);
}
public abstract ISourceContext getSourceContext();
// ---- fields
public static final ResolvedType[] NONE = new ResolvedType[0];
public static final Primitive BYTE = new Primitive("B", 1, 0);
public static final Primitive CHAR = new Primitive("C", 1, 1);
public static final Primitive DOUBLE = new Primitive("D", 2, 2);
public static final Primitive FLOAT = new Primitive("F", 1, 3);
public static final Primitive INT = new Primitive("I", 1, 4);
public static final Primitive LONG = new Primitive("J", 2, 5);
public static final Primitive SHORT = new Primitive("S", 1, 6);
public static final Primitive VOID = new Primitive("V", 0, 8);
public static final Primitive BOOLEAN = new Primitive("Z", 1, 7);
public static final Missing MISSING = new Missing();
// ---- types
public static ResolvedType makeArray(ResolvedType type, int dim) {
if (dim == 0) return type;
ResolvedType array = new Array("[" + type.getSignature(),type.getWorld(),type);
return makeArray(array,dim-1);
}
static class Array extends ResolvedType {
ResolvedType componentType;
Array(String s, World world, ResolvedType componentType) {
super(s, world);
this.componentType = componentType;
}
public final ResolvedMember[] getDeclaredFields() {
return ResolvedMember.NONE;
}
public final ResolvedMember[] getDeclaredMethods() {
// ??? should this return clone? Probably not...
// If it ever does, here is the code:
// ResolvedMember cloneMethod =
// new ResolvedMember(Member.METHOD,this,Modifier.PUBLIC,UnresolvedType.OBJECT,"clone",new UnresolvedType[]{});
// return new ResolvedMember[]{cloneMethod};
return ResolvedMember.NONE;
}
public final ResolvedType[] getDeclaredInterfaces() {
return
new ResolvedType[] {
world.getCoreType(CLONEABLE),
world.getCoreType(SERIALIZABLE)
};
}
public final ResolvedMember[] getDeclaredPointcuts() {
return ResolvedMember.NONE;
}
public boolean hasAnnotation(UnresolvedType ofType) {
return false;
}
public final ResolvedType getSuperclass() {
return world.getCoreType(OBJECT);
}
public final boolean isAssignableFrom(ResolvedType o) {
if (! o.isArray()) return false;
if (o.getComponentType().isPrimitiveType()) {
return o.equals(this);
} else {
return getComponentType().resolve(world).isAssignableFrom(o.getComponentType().resolve(world));
}
}
public final boolean isCoerceableFrom(ResolvedType o) {
if (o.equals(UnresolvedType.OBJECT) ||
o.equals(UnresolvedType.SERIALIZABLE) ||
o.equals(UnresolvedType.CLONEABLE)) {
return true;
}
if (! o.isArray()) return false;
if (o.getComponentType().isPrimitiveType()) {
return o.equals(this);
} else {
return getComponentType().resolve(world).isCoerceableFrom(o.getComponentType().resolve(world));
}
}
public final int getModifiers() {
int mask = Modifier.PUBLIC | Modifier.PRIVATE | Modifier.PROTECTED;
return (componentType.getModifiers() & mask) | Modifier.FINAL;
}
public UnresolvedType getComponentType() {
return componentType;
}
public ResolvedType getResolvedComponentType() {
return componentType;
}
public ISourceContext getSourceContext() {
return getResolvedComponentType().getSourceContext();
}
}
static class Primitive extends ResolvedType {
private int size;
private int index;
Primitive(String signature, int size, int index) {
super(signature, null);
this.size = size;
this.index = index;
}
public final int getSize() {
return size;
}
public final int getModifiers() {
return Modifier.PUBLIC | Modifier.FINAL;
}
public final boolean isPrimitiveType() {
return true;
}
public boolean hasAnnotation(UnresolvedType ofType) {
return false;
}
public final boolean isAssignableFrom(ResolvedType other) {
if (!other.isPrimitiveType()) {
if (!world.isInJava5Mode()) return false;
return validBoxing.contains(this.getSignature()+other.getSignature());
}
return assignTable[((Primitive)other).index][index];
}
public final boolean isCoerceableFrom(ResolvedType other) {
if (this == other) return true;
if (! other.isPrimitiveType()) return false;
if (index > 6 || ((Primitive)other).index > 6) return false;
return true;
}
public ResolvedType resolve(World world) {
this.world = world;
return super.resolve(world);
}
public final boolean needsNoConversionFrom(ResolvedType other) {
if (! other.isPrimitiveType()) return false;
return noConvertTable[((Primitive)other).index][index];
}
private static final boolean[][] assignTable =
{// to: B C D F I J S V Z from
{ true , true , true , true , true , true , true , false, false }, // B
{ false, true , true , true , true , true , false, false, false }, // C
{ false, false, true , false, false, false, false, false, false }, // D
{ false, false, true , true , false, false, false, false, false }, // F
{ false, false, true , true , true , true , false, false, false }, // I
{ false, false, true , true , false, true , false, false, false }, // J
{ false, false, true , true , true , true , true , false, false }, // S
{ false, false, false, false, false, false, false, true , false }, // V
{ false, false, false, false, false, false, false, false, true }, // Z
};
private static final boolean[][] noConvertTable =
{// to: B C D F I J S V Z from
{ true , true , false, false, true , false, true , false, false }, // B
{ false, true , false, false, true , false, false, false, false }, // C
{ false, false, true , false, false, false, false, false, false }, // D
{ false, false, false, true , false, false, false, false, false }, // F
{ false, false, false, false, true , false, false, false, false }, // I
{ false, false, false, false, false, true , false, false, false }, // J
{ false, false, false, false, true , false, true , false, false }, // S
{ false, false, false, false, false, false, false, true , false }, // V
{ false, false, false, false, false, false, false, false, true }, // Z
};
// ----
public final ResolvedMember[] getDeclaredFields() {
return ResolvedMember.NONE;
}
public final ResolvedMember[] getDeclaredMethods() {
return ResolvedMember.NONE;
}
public final ResolvedType[] getDeclaredInterfaces() {
return ResolvedType.NONE;
}
public final ResolvedMember[] getDeclaredPointcuts() {
return ResolvedMember.NONE;
}
public final ResolvedType getSuperclass() {
return null;
}
public ISourceContext getSourceContext() {
return null;
}
}
static class Missing extends ResolvedType {
Missing() {
super(MISSING_NAME, null);
}
// public final String toString() {
// return "<missing>";
// }
public final String getName() {
return MISSING_NAME;
}
public boolean hasAnnotation(UnresolvedType ofType) {
return false;
}
public final ResolvedMember[] getDeclaredFields() {
return ResolvedMember.NONE;
}
public final ResolvedMember[] getDeclaredMethods() {
return ResolvedMember.NONE;
}
public final ResolvedType[] getDeclaredInterfaces() {
return ResolvedType.NONE;
}
public final ResolvedMember[] getDeclaredPointcuts() {
return ResolvedMember.NONE;
}
public final ResolvedType getSuperclass() {
return null;
}
public final int getModifiers() {
return 0;
}
public final boolean isAssignableFrom(ResolvedType other) {
return false;
}
public final boolean isCoerceableFrom(ResolvedType other) {
return false;
}
public boolean needsNoConversionFrom(ResolvedType other) {
return false;
}
public ISourceContext getSourceContext() {
return null;
}
}
/**
* Look up a member, takes into account any ITDs on this type.
* return null if not found */
public ResolvedMember lookupMemberNoSupers(Member member) {
ResolvedMember ret;
if (member.getKind() == Member.FIELD) {
ret = lookupMember(member, getDeclaredFields());
} else {
// assert member.getKind() == Member.METHOD || member.getKind() == Member.CONSTRUCTOR
ret = lookupMember(member, getDeclaredMethods());
}
if (ret == null && interTypeMungers != null) {
for (Iterator i = interTypeMungers.iterator(); i.hasNext();) {
ConcreteTypeMunger tm = (ConcreteTypeMunger) i.next();
if (matches(tm.getSignature(), member)) {
return tm.getSignature();
}
}
}
return ret;
}
protected List interTypeMungers = new ArrayList(0);
public List getInterTypeMungers() {
return interTypeMungers;
}
public List getInterTypeParentMungers() {
List l = new ArrayList();
for (Iterator iter = interTypeMungers.iterator(); iter.hasNext();) {
ConcreteTypeMunger element = (ConcreteTypeMunger) iter.next();
if (element.getMunger() instanceof NewParentTypeMunger) l.add(element);
}
return l;
}
/**
* ??? This method is O(N*M) where N = number of methods and M is number of
* inter-type declarations in my super
*/
public List getInterTypeMungersIncludingSupers() {
ArrayList ret = new ArrayList();
collectInterTypeMungers(ret);
return ret;
}
public List getInterTypeParentMungersIncludingSupers() {
ArrayList ret = new ArrayList();
collectInterTypeParentMungers(ret);
return ret;
}
private void collectInterTypeParentMungers(List collector) {
for (Iterator iter = getDirectSupertypes(); iter.hasNext();) {
ResolvedType superType = (ResolvedType) iter.next();
superType.collectInterTypeParentMungers(collector);
}
collector.addAll(getInterTypeParentMungers());
}
private void collectInterTypeMungers(List collector) {
for (Iterator iter = getDirectSupertypes(); iter.hasNext();) {
ResolvedType superType = (ResolvedType) iter.next();
superType.collectInterTypeMungers(collector);
}
outer: for (Iterator iter1 = collector.iterator(); iter1.hasNext(); ) {
ConcreteTypeMunger superMunger = (ConcreteTypeMunger) iter1.next();
if ( superMunger.getSignature() == null) continue;
if ( !superMunger.getSignature().isAbstract()) continue;
for (Iterator iter = getInterTypeMungers().iterator(); iter.hasNext();) {
ConcreteTypeMunger myMunger = (ConcreteTypeMunger) iter.next();
if (conflictingSignature(myMunger.getSignature(), superMunger.getSignature())) {
iter1.remove();
continue outer;
}
}
if (!superMunger.getSignature().isPublic()) continue;
for (Iterator iter = getMethods(); iter.hasNext(); ) {
ResolvedMember method = (ResolvedMember)iter.next();
if (conflictingSignature(method, superMunger.getSignature())) {
iter1.remove();
continue outer;
}
}
}
collector.addAll(getInterTypeMungers());
}
/**
* Check:
* 1) That we don't have any abstract type mungers unless this type is abstract.
* 2) That an abstract ITDM on an interface is declared public. (Compiler limitation) (PR70794)
*/
public void checkInterTypeMungers() {
if (isAbstract()) return;
boolean itdProblem = false;
for (Iterator iter = getInterTypeMungersIncludingSupers().iterator(); iter.hasNext();) {
ConcreteTypeMunger munger = (ConcreteTypeMunger) iter.next();
itdProblem = checkAbstractDeclaration(munger) || itdProblem; // Rule 2
}
if (itdProblem) return; // If the rules above are broken, return right now
for (Iterator iter = getInterTypeMungersIncludingSupers().iterator(); iter.hasNext();) {
ConcreteTypeMunger munger = (ConcreteTypeMunger) iter.next();
if (munger.getSignature() != null && munger.getSignature().isAbstract()) { // Rule 1
world.getMessageHandler().handleMessage(
new Message("must implement abstract inter-type declaration: " + munger.getSignature(),
"", IMessage.ERROR, getSourceLocation(), null,
new ISourceLocation[] { getMungerLocation(munger) }));
}
}
}
/**
* See PR70794. This method checks that if an abstract inter-type method declaration is made on
* an interface then it must also be public.
* This is a compiler limitation that could be made to work in the future (if someone
* provides a worthwhile usecase)
*
* @return indicates if the munger failed the check
*/
private boolean checkAbstractDeclaration(ConcreteTypeMunger munger) {
if (munger.getMunger()!=null && (munger.getMunger() instanceof NewMethodTypeMunger)) {
ResolvedMember itdMember = munger.getSignature();
ResolvedType onType = itdMember.getDeclaringType().resolve(world);
if (onType.isInterface() && itdMember.isAbstract() && !itdMember.isPublic()) {
world.getMessageHandler().handleMessage(
new Message(WeaverMessages.format(WeaverMessages.ITD_ABSTRACT_MUST_BE_PUBLIC_ON_INTERFACE,munger.getSignature(),onType),"",
Message.ERROR,getSourceLocation(),null,
new ISourceLocation[]{getMungerLocation(munger)})
);
return true;
}
}
return false;
}
/**
* Get a source location for the munger.
* Until intertype mungers remember where they came from, the source location
* for the munger itself is null. In these cases use the
* source location for the aspect containing the ITD.
*
*/
private ISourceLocation getMungerLocation(ConcreteTypeMunger munger) {
ISourceLocation sloc = munger.getSourceLocation();
if (sloc == null) {
sloc = munger.getAspectType().getSourceLocation();
}
return sloc;
}
/**
* Returns a ResolvedType object representing the declaring type of this type, or
* null if this type does not represent a non-package-level-type.
*
* <strong>Warning</strong>: This is guaranteed to work for all member types.
* For anonymous/local types, the only guarantee is given in JLS 13.1, where
* it guarantees that if you call getDeclaringType() repeatedly, you will eventually
* get the top-level class, but it does not say anything about classes in between.
*
* @return the declaring UnresolvedType object, or null.
*/
public ResolvedType getDeclaringType() {
if (isArray()) return null;
String name = getName();
int lastDollar = name.lastIndexOf('$');
while (lastDollar != -1) {
ResolvedType ret = world.resolve(UnresolvedType.forName(name.substring(0, lastDollar)), true);
if (ret != ResolvedType.MISSING) return ret;
lastDollar = name.lastIndexOf('$', lastDollar-1);
}
return null;
}
public static boolean isVisible(int modifiers, ResolvedType targetType, ResolvedType fromType) {
//System.err.println("mod: " + modifiers + ", " + targetType + " and " + fromType);
if (Modifier.isPublic(modifiers)) {
return true;
} else if (Modifier.isPrivate(modifiers)) {
return targetType.getOutermostType().equals(fromType.getOutermostType());
} else if (Modifier.isProtected(modifiers)) {
return samePackage(targetType, fromType) || targetType.isAssignableFrom(fromType);
} else { // package-visible
return samePackage(targetType, fromType);
}
}
public static boolean hasBridgeModifier(int modifiers) {
return (modifiers & Constants.ACC_BRIDGE)!=0;
}
private static boolean samePackage(
ResolvedType targetType,
ResolvedType fromType)
{
String p1 = targetType.getPackageName();
String p2 = fromType.getPackageName();
if (p1 == null) return p2 == null;
if (p2 == null) return false;
return p1.equals(p2);
}
public void addInterTypeMunger(ConcreteTypeMunger munger) {
ResolvedMember sig = munger.getSignature();
if (sig == null || munger.getMunger() == null ||
munger.getMunger().getKind() == ResolvedTypeMunger.PrivilegedAccess)
{
interTypeMungers.add(munger);
return;
}
//System.err.println("add: " + munger + " to " + this.getClassName() + " with " + interTypeMungers);
if (sig.getKind() == Member.METHOD) {
if (!compareToExistingMembers(munger, getMethods())) return;
if (this.isInterface()) {
if (!compareToExistingMembers(munger,
Arrays.asList(world.getCoreType(OBJECT).getDeclaredMethods()).iterator())) return;
}
} else if (sig.getKind() == Member.FIELD) {
if (!compareToExistingMembers(munger, Arrays.asList(getDeclaredFields()).iterator())) return;
} else {
if (!compareToExistingMembers(munger, Arrays.asList(getDeclaredMethods()).iterator())) return;
}
// now compare to existingMungers
for (Iterator i = interTypeMungers.iterator(); i.hasNext(); ) {
ConcreteTypeMunger existingMunger = (ConcreteTypeMunger)i.next();
if (conflictingSignature(existingMunger.getSignature(), munger.getSignature())) {
//System.err.println("match " + munger + " with " + existingMunger);
if (isVisible(munger.getSignature().getModifiers(),
munger.getAspectType(), existingMunger.getAspectType()))
{
//System.err.println(" is visible");
int c = compareMemberPrecedence(sig, existingMunger.getSignature());
if (c == 0) {
c = getWorld().compareByPrecedenceAndHierarchy(munger.getAspectType(), existingMunger.getAspectType());
}
//System.err.println(" compare: " + c);
if (c < 0) {
// the existing munger dominates the new munger
checkLegalOverride(munger.getSignature(), existingMunger.getSignature());
return;
} else if (c > 0) {
// the new munger dominates the existing one
checkLegalOverride(existingMunger.getSignature(), munger.getSignature());
i.remove();
break;
} else {
interTypeConflictError(munger, existingMunger);
interTypeConflictError(existingMunger, munger);
return;
}
}
}
}
//System.err.println("adding: " + munger + " to " + this);
interTypeMungers.add(munger);
}
//??? returning too soon
private boolean compareToExistingMembers(ConcreteTypeMunger munger, Iterator existingMembers) {
ResolvedMember sig = munger.getSignature();
while (existingMembers.hasNext()) {
ResolvedMember existingMember = (ResolvedMember)existingMembers.next();
//System.err.println("Comparing munger: "+sig+" with member "+existingMember);
if (conflictingSignature(existingMember, munger.getSignature())) {
//System.err.println("conflict: existingMember=" + existingMember + " typeMunger=" + munger);
//System.err.println(munger.getSourceLocation() + ", " + munger.getSignature() + ", " + munger.getSignature().getSourceLocation());
if (isVisible(existingMember.getModifiers(), this, munger.getAspectType())) {
int c = compareMemberPrecedence(sig, existingMember);
//System.err.println(" c: " + c);
if (c < 0) {
// existingMember dominates munger
checkLegalOverride(munger.getSignature(), existingMember);
return false;
} else if (c > 0) {
// munger dominates existingMember
checkLegalOverride(existingMember, munger.getSignature());
//interTypeMungers.add(munger);
//??? might need list of these overridden abstracts
continue;
} else {
//XXX dual errors possible if (this instanceof BcelObjectType) return false; //XXX ignores separate comp
getWorld().getMessageHandler().handleMessage(
MessageUtil.error(WeaverMessages.format(WeaverMessages.ITD_MEMBER_CONFLICT,munger.getAspectType().getName(),
existingMember),
munger.getSourceLocation())
);
}
} else if (isDuplicateMemberWithinTargetType(existingMember,this,sig)) {
getWorld().getMessageHandler().handleMessage(
MessageUtil.error(WeaverMessages.format(WeaverMessages.ITD_MEMBER_CONFLICT,munger.getAspectType().getName(),
existingMember),
munger.getSourceLocation())
);;
}
//return;
}
}
return true;
}
// we know that the member signature matches, but that the member in the target type is not visible to the aspect.
// this may still be disallowed if it would result in two members within the same declaring type with the same
// signature AND more than one of them is concrete AND they are both visible within the target type.
private boolean isDuplicateMemberWithinTargetType(ResolvedMember existingMember, ResolvedType targetType,ResolvedMember itdMember) {
if ( (existingMember.isAbstract() || itdMember.isAbstract())) return false;
UnresolvedType declaringType = existingMember.getDeclaringType();
if (!targetType.equals(declaringType)) return false;
// now have to test that itdMember is visible from targetType
if (itdMember.isPrivate()) return false;
if (itdMember.isPublic()) return true;
// must be in same package to be visible then...
if (!targetType.getPackageName().equals(itdMember.getDeclaringType().getPackageName())) return false;
// trying to put two members with the same signature into the exact same type..., and both visible in that type.
return true;
}
/**
* @return true if the override is legal
* note: calling showMessage with two locations issues TWO messages, not ONE message
* with an additional source location.
*/
public boolean checkLegalOverride(ResolvedMember parent, ResolvedMember child) {
//System.err.println("check: " + child.getDeclaringType() + " overrides " + parent.getDeclaringType());
if (Modifier.isFinal(parent.getModifiers())) {
world.showMessage(Message.ERROR,
WeaverMessages.format(WeaverMessages.CANT_OVERRIDE_FINAL_MEMBER,parent),
child.getSourceLocation(),null);
return false;
}
if (!parent.getReturnType().equals(child.getReturnType())) {
world.showMessage(IMessage.ERROR,
WeaverMessages.format(WeaverMessages.ITD_RETURN_TYPE_MISMATCH,parent,child),
child.getSourceLocation(), parent.getSourceLocation());
return false;
}
if (parent.getKind() == Member.POINTCUT) {
UnresolvedType[] pTypes = parent.getParameterTypes();
UnresolvedType[] cTypes = child.getParameterTypes();
if (!Arrays.equals(pTypes, cTypes)) {
world.showMessage(IMessage.ERROR,
WeaverMessages.format(WeaverMessages.ITD_PARAM_TYPE_MISMATCH,parent,child),
child.getSourceLocation(), parent.getSourceLocation());
return false;
}
}
//System.err.println("check: " + child.getModifiers() + " more visible " + parent.getModifiers());
if (isMoreVisible(parent.getModifiers(), child.getModifiers())) {
world.showMessage(IMessage.ERROR,
WeaverMessages.format(WeaverMessages.ITD_VISIBILITY_REDUCTION,parent,child),
child.getSourceLocation(), parent.getSourceLocation());
return false;
}
// check declared exceptions
ResolvedType[] childExceptions = world.resolve(child.getExceptions());
ResolvedType[] parentExceptions = world.resolve(parent.getExceptions());
ResolvedType runtimeException = world.resolve("java.lang.RuntimeException");
ResolvedType error = world.resolve("java.lang.Error");
outer: for (int i=0, leni = childExceptions.length; i < leni; i++) {
//System.err.println("checking: " + childExceptions[i]);
if (runtimeException.isAssignableFrom(childExceptions[i])) continue;
if (error.isAssignableFrom(childExceptions[i])) continue;
for (int j = 0, lenj = parentExceptions.length; j < lenj; j++) {
if (parentExceptions[j].isAssignableFrom(childExceptions[i])) continue outer;
}
- world.showMessage(IMessage.ERROR,
- WeaverMessages.format(WeaverMessages.ITD_DOESNT_THROW,childExceptions[i].getName()),
- child.getSourceLocation(), null);
+ // this message is now better handled my MethodVerifier in JDT core.
+// world.showMessage(IMessage.ERROR,
+// WeaverMessages.format(WeaverMessages.ITD_DOESNT_THROW,childExceptions[i].getName()),
+// child.getSourceLocation(), null);
return false;
}
if (parent.isStatic() && !child.isStatic()) {
world.showMessage(IMessage.ERROR,
WeaverMessages.format(WeaverMessages.ITD_OVERRIDDEN_STATIC,child,parent),
child.getSourceLocation(),null);
+ return false;
} else if (child.isStatic() && !parent.isStatic()) {
world.showMessage(IMessage.ERROR,
WeaverMessages.format(WeaverMessages.ITD_OVERIDDING_STATIC,child,parent),
child.getSourceLocation(),null);
+ return false;
}
return true;
}
private int compareMemberPrecedence(ResolvedMember m1, ResolvedMember m2) {
//if (!m1.getReturnType().equals(m2.getReturnType())) return 0;
// need to allow for the special case of 'clone' - which is like abstract but is
// not marked abstract. The code below this next line seems to make assumptions
// about what will have gotten through the compiler based on the normal
// java rules. clone goes against these...
if (m2.isProtected() && m2.isNative() && m2.getName().equals("clone")) return +1;
if (Modifier.isAbstract(m1.getModifiers())) return -1;
if (Modifier.isAbstract(m2.getModifiers())) return +1;
if (m1.getDeclaringType().equals(m2.getDeclaringType())) return 0;
ResolvedType t1 = m1.getDeclaringType().resolve(world);
ResolvedType t2 = m2.getDeclaringType().resolve(world);
if (t1.isAssignableFrom(t2)) {
return -1;
}
if (t2.isAssignableFrom(t1)) {
return +1;
}
return 0;
}
public static boolean isMoreVisible(int m1, int m2) {
if (Modifier.isPrivate(m1)) return false;
if (isPackage(m1)) return Modifier.isPrivate(m2);
if (Modifier.isProtected(m1)) return /* private package */ (Modifier.isPrivate(m2) || isPackage(m2));
if (Modifier.isPublic(m1)) return /* private package protected */ ! Modifier.isPublic(m2);
throw new RuntimeException("bad modifier: " + m1);
}
private static boolean isPackage(int i) {
return (0 == (i & (Modifier.PUBLIC | Modifier.PRIVATE | Modifier.PROTECTED)));
}
private void interTypeConflictError(
ConcreteTypeMunger m1,
ConcreteTypeMunger m2)
{
//XXX this works only if we ignore separate compilation issues
//XXX dual errors possible if (this instanceof BcelObjectType) return;
//System.err.println("conflict at " + m2.getSourceLocation());
getWorld().showMessage(IMessage.ERROR,
WeaverMessages.format(WeaverMessages.ITD_CONFLICT,m1.getAspectType().getName(),
m2.getSignature(),m2.getAspectType().getName()),
m2.getSourceLocation(), getSourceLocation());
}
public ResolvedMember lookupSyntheticMember(Member member) {
//??? horribly inefficient
//for (Iterator i =
//System.err.println("lookup " + member + " in " + interTypeMungers);
for (Iterator i = interTypeMungers.iterator(); i.hasNext(); ) {
ConcreteTypeMunger m = (ConcreteTypeMunger)i.next();
ResolvedMember ret = m.getMatchingSyntheticMember(member);
if (ret != null) {
//System.err.println(" found: " + ret);
return ret;
}
}
return null;
}
public void clearInterTypeMungers() {
if (isRawType()) getGenericType().clearInterTypeMungers();
interTypeMungers = new ArrayList();
}
public boolean isTopmostImplementor(ResolvedType interfaceType) {
if (isInterface()) return false;
if (!interfaceType.isAssignableFrom(this)) return false;
// check that I'm truly the topmost implementor
if (interfaceType.isAssignableFrom(this.getSuperclass())) {
return false;
}
return true;
}
public ResolvedType getTopmostImplementor(ResolvedType interfaceType) {
if (isInterface()) return null;
if (!interfaceType.isAssignableFrom(this)) return null;
// Check if my super class is an implementor?
ResolvedType higherType = this.getSuperclass().getTopmostImplementor(interfaceType);
if (higherType!=null) return higherType;
return this;
}
private ResolvedType findHigher(ResolvedType other) {
if (this == other) return this;
for(Iterator i = other.getDirectSupertypes(); i.hasNext(); ) {
ResolvedType rtx = (ResolvedType)i.next();
boolean b = this.isAssignableFrom(rtx);
if (b) return rtx;
}
return null;
}
public List getExposedPointcuts() {
List ret = new ArrayList();
if (getSuperclass() != null) ret.addAll(getSuperclass().getExposedPointcuts());
for (Iterator i = Arrays.asList(getDeclaredInterfaces()).iterator(); i.hasNext(); ) {
ResolvedType t = (ResolvedType)i.next();
addPointcutsResolvingConflicts(ret, Arrays.asList(t.getDeclaredPointcuts()), false);
}
addPointcutsResolvingConflicts(ret, Arrays.asList(getDeclaredPointcuts()), true);
for (Iterator i = ret.iterator(); i.hasNext(); ) {
ResolvedPointcutDefinition inherited = (ResolvedPointcutDefinition)i.next();
// System.err.println("looking at: " + inherited + " in " + this);
// System.err.println(" " + inherited.isAbstract() + " in " + this.isAbstract());
if (inherited.isAbstract()) {
if (!this.isAbstract()) {
getWorld().showMessage(IMessage.ERROR,
WeaverMessages.format(WeaverMessages.POINCUT_NOT_CONCRETE,inherited,this.getName()),
inherited.getSourceLocation(), this.getSourceLocation());
}
}
}
return ret;
}
private void addPointcutsResolvingConflicts(List acc, List added, boolean isOverriding) {
for (Iterator i = added.iterator(); i.hasNext();) {
ResolvedPointcutDefinition toAdd =
(ResolvedPointcutDefinition) i.next();
//System.err.println("adding: " + toAdd);
for (Iterator j = acc.iterator(); j.hasNext();) {
ResolvedPointcutDefinition existing =
(ResolvedPointcutDefinition) j.next();
if (existing == toAdd) continue;
if (!isVisible(existing.getModifiers(),
existing.getDeclaringType().resolve(getWorld()),
this)) {
continue;
}
if (conflictingSignature(existing, toAdd)) {
if (isOverriding) {
checkLegalOverride(existing, toAdd);
j.remove();
} else {
getWorld().showMessage(
IMessage.ERROR,
WeaverMessages.format(WeaverMessages.CONFLICTING_INHERITED_POINTCUTS,this.getName() + toAdd.getSignature()),
existing.getSourceLocation(),
toAdd.getSourceLocation());
j.remove();
}
}
}
acc.add(toAdd);
}
}
public ISourceLocation getSourceLocation() { return null; }
public boolean isExposedToWeaver() { return false; }
public WeaverStateInfo getWeaverState() {
return null;
}
/**
* Overridden by ReferenceType to return a sensible answer for parameterized and raw types.
* @return
*/
public ResolvedType getGenericType() {
if (!(isParameterizedType() || isRawType()))
throw new BCException("The type "+getBaseName()+" is not parameterized or raw - it has no generic type");
return null;
}
public ResolvedType parameterizedWith(UnresolvedType[] typeParameters) {
if (!(isGenericType() || isParameterizedType())) return this;
return TypeFactory.createParameterizedType(this.getGenericType(), typeParameters, getWorld());
}
/**
* Iff I am a parameterized type, and any of my parameters are type variable
* references, return a version with those type parameters replaced in accordance
* with the passed bindings.
*/
public UnresolvedType parameterize(Map typeBindings) {
if (!isParameterizedType()) throw new IllegalStateException("Can't parameterize a type that is not a parameterized type");
boolean workToDo = false;
for (int i = 0; i < typeParameters.length; i++) {
if (typeParameters[i].isTypeVariableReference()) {
workToDo = true;
}
}
if (!workToDo) {
return this;
} else {
UnresolvedType[] newTypeParams = new UnresolvedType[typeParameters.length];
for (int i = 0; i < newTypeParams.length; i++) {
newTypeParams[i] = typeParameters[i];
if (newTypeParams[i].isTypeVariableReference()) {
TypeVariableReferenceType tvrt = (TypeVariableReferenceType) newTypeParams[i];
UnresolvedType binding = (UnresolvedType) typeBindings.get(tvrt.getTypeVariable().getName());
if (binding != null) newTypeParams[i] = binding;
}
}
return TypeFactory.createParameterizedType(getGenericType(), newTypeParams, getWorld());
}
}
public boolean hasParameterizedSuperType() {
getParameterizedSuperTypes();
return parameterizedSuperTypes.length > 0;
}
public boolean hasGenericSuperType() {
ResolvedType[] superTypes = getDeclaredInterfaces();
for (int i = 0; i < superTypes.length; i++) {
if (superTypes[i].isGenericType()) return true;
}
return false;
}
private ResolvedType[] parameterizedSuperTypes = null;
/**
* Similar to the above method, but accumulates the super types
* @return
*/
public ResolvedType[] getParameterizedSuperTypes() {
if (parameterizedSuperTypes != null) return parameterizedSuperTypes;
List accumulatedTypes = new ArrayList();
accumulateParameterizedSuperTypes(this,accumulatedTypes);
ResolvedType[] ret = new ResolvedType[accumulatedTypes.size()];
parameterizedSuperTypes = (ResolvedType[]) accumulatedTypes.toArray(ret);
return parameterizedSuperTypes;
}
private void accumulateParameterizedSuperTypes(ResolvedType forType, List parameterizedTypeList) {
if (forType.isParameterizedType()) {
parameterizedTypeList.add(forType);
}
if (forType.getSuperclass() != null) {
accumulateParameterizedSuperTypes(forType.getSuperclass(), parameterizedTypeList);
}
ResolvedType[] interfaces = forType.getDeclaredInterfaces();
for (int i = 0; i < interfaces.length; i++) {
accumulateParameterizedSuperTypes(interfaces[i], parameterizedTypeList);
}
}
/**
* Types may have pointcuts just as they have methods and fields.
*/
public ResolvedPointcutDefinition findPointcut(String name, World world) {
throw new UnsupportedOperationException("Not yet implemenented");
}
/**
* Determines if variables of this type could be assigned values of another
* with lots of help.
* java.lang.Object is convertable from all types.
* A primitive type is convertable from X iff it's assignable from X.
* A reference type is convertable from X iff it's coerceable from X.
* In other words, X isConvertableFrom Y iff the compiler thinks that _some_ value of Y
* could be assignable to a variable of type X without loss of precision.
*
* @param other the other type
* @param world the {@link World} in which the possible assignment should be checked.
* @return true iff variables of this type could be assigned values of other with possible conversion
*/
public final boolean isConvertableFrom(ResolvedType other) {
// // version from TypeX
// if (this.equals(OBJECT)) return true;
// if (this.isPrimitiveType() || other.isPrimitiveType()) return this.isAssignableFrom(other);
// return this.isCoerceableFrom(other);
//
// version from ResolvedTypeX
if (this.equals(OBJECT)) return true;
if (world.isInJava5Mode()) {
if (this.isPrimitiveType()^other.isPrimitiveType()) { // If one is primitive and the other isnt
if (validBoxing.contains(this.getSignature()+other.getSignature())) return true;
}
}
if (this.isPrimitiveType() || other.isPrimitiveType()) return this.isAssignableFrom(other);
return this.isCoerceableFrom(other);
}
/**
* Determines if the variables of this type could be assigned values
* of another type without casting. This still allows for assignment conversion
* as per JLS 2ed 5.2. For object types, this means supertypeOrEqual(THIS, OTHER).
*
* @param other the other type
* @param world the {@link World} in which the possible assignment should be checked.
* @return true iff variables of this type could be assigned values of other without casting
* @exception NullPointerException if other is null
*/
public abstract boolean isAssignableFrom(ResolvedType other);
/**
* Determines if values of another type could possibly be cast to
* this type. The rules followed are from JLS 2ed 5.5, "Casting Conversion".
*
* <p> This method should be commutative, i.e., for all UnresolvedType a, b and all World w:
*
* <blockquote><pre>
* a.isCoerceableFrom(b, w) == b.isCoerceableFrom(a, w)
* </pre></blockquote>
*
* @param other the other type
* @param world the {@link World} in which the possible coersion should be checked.
* @return true iff values of other could possibly be cast to this type.
* @exception NullPointerException if other is null.
*/
public abstract boolean isCoerceableFrom(ResolvedType other);
public boolean needsNoConversionFrom(ResolvedType o) {
return isAssignableFrom(o);
}
/**
* Implemented by ReferenceTypes
*/
public String getSignatureForAttribute() {
throw new RuntimeException("Cannot ask this type "+this+" for a generic sig attribute");
}
private FuzzyBoolean parameterizedWithAMemberTypeVariable = FuzzyBoolean.MAYBE;
/**
* return true if the parameterization of this type includes a member type variable. Member
* type variables occur in generic methods/ctors.
*/
public boolean isParameterizedWithAMemberTypeVariable() {
// MAYBE means we haven't worked it out yet...
if (parameterizedWithAMemberTypeVariable==FuzzyBoolean.MAYBE) {
// if there are no type parameters then we cant be...
if (typeParameters==null || typeParameters.length==0) {
parameterizedWithAMemberTypeVariable = FuzzyBoolean.NO;
return false;
}
for (int i = 0; i < typeParameters.length; i++) {
UnresolvedType aType = (ResolvedType)typeParameters[i];
if (aType.isTypeVariableReference() && ((TypeVariableReference)aType).getTypeVariable().getDeclaringElementKind()==TypeVariable.METHOD) {
parameterizedWithAMemberTypeVariable = FuzzyBoolean.YES;
return true;
}
if (aType.isParameterizedType()) {
boolean b = aType.isParameterizedWithAMemberTypeVariable();
if (b) {
parameterizedWithAMemberTypeVariable = FuzzyBoolean.YES;
return true;
}
}
if (aType.isGenericWildcard()) {
if (aType.isExtends()) {
boolean b = false;
UnresolvedType upperBound = aType.getUpperBound();
if (upperBound.isParameterizedType()) {
b = upperBound.isParameterizedWithAMemberTypeVariable();
} else if (upperBound.isTypeVariableReference() && ((TypeVariableReference)upperBound).getTypeVariable().getDeclaringElementKind()==TypeVariable.METHOD) {
b = true;
}
if (b) {
parameterizedWithAMemberTypeVariable = FuzzyBoolean.YES;
return true;
}
// FIXME asc need to check additional interface bounds
}
if (aType.isSuper()) {
boolean b = false;
UnresolvedType lowerBound = aType.getLowerBound();
if (lowerBound.isParameterizedType()) {
b = lowerBound.isParameterizedWithAMemberTypeVariable();
} else if (lowerBound.isTypeVariableReference() && ((TypeVariableReference)lowerBound).getTypeVariable().getDeclaringElementKind()==TypeVariable.METHOD) {
b = true;
}
if (b) {
parameterizedWithAMemberTypeVariable = FuzzyBoolean.YES;
return true;
}
}
}
}
parameterizedWithAMemberTypeVariable=FuzzyBoolean.NO;
}
return parameterizedWithAMemberTypeVariable.alwaysTrue();
}
}
| false | true | public boolean checkLegalOverride(ResolvedMember parent, ResolvedMember child) {
//System.err.println("check: " + child.getDeclaringType() + " overrides " + parent.getDeclaringType());
if (Modifier.isFinal(parent.getModifiers())) {
world.showMessage(Message.ERROR,
WeaverMessages.format(WeaverMessages.CANT_OVERRIDE_FINAL_MEMBER,parent),
child.getSourceLocation(),null);
return false;
}
if (!parent.getReturnType().equals(child.getReturnType())) {
world.showMessage(IMessage.ERROR,
WeaverMessages.format(WeaverMessages.ITD_RETURN_TYPE_MISMATCH,parent,child),
child.getSourceLocation(), parent.getSourceLocation());
return false;
}
if (parent.getKind() == Member.POINTCUT) {
UnresolvedType[] pTypes = parent.getParameterTypes();
UnresolvedType[] cTypes = child.getParameterTypes();
if (!Arrays.equals(pTypes, cTypes)) {
world.showMessage(IMessage.ERROR,
WeaverMessages.format(WeaverMessages.ITD_PARAM_TYPE_MISMATCH,parent,child),
child.getSourceLocation(), parent.getSourceLocation());
return false;
}
}
//System.err.println("check: " + child.getModifiers() + " more visible " + parent.getModifiers());
if (isMoreVisible(parent.getModifiers(), child.getModifiers())) {
world.showMessage(IMessage.ERROR,
WeaverMessages.format(WeaverMessages.ITD_VISIBILITY_REDUCTION,parent,child),
child.getSourceLocation(), parent.getSourceLocation());
return false;
}
// check declared exceptions
ResolvedType[] childExceptions = world.resolve(child.getExceptions());
ResolvedType[] parentExceptions = world.resolve(parent.getExceptions());
ResolvedType runtimeException = world.resolve("java.lang.RuntimeException");
ResolvedType error = world.resolve("java.lang.Error");
outer: for (int i=0, leni = childExceptions.length; i < leni; i++) {
//System.err.println("checking: " + childExceptions[i]);
if (runtimeException.isAssignableFrom(childExceptions[i])) continue;
if (error.isAssignableFrom(childExceptions[i])) continue;
for (int j = 0, lenj = parentExceptions.length; j < lenj; j++) {
if (parentExceptions[j].isAssignableFrom(childExceptions[i])) continue outer;
}
world.showMessage(IMessage.ERROR,
WeaverMessages.format(WeaverMessages.ITD_DOESNT_THROW,childExceptions[i].getName()),
child.getSourceLocation(), null);
return false;
}
if (parent.isStatic() && !child.isStatic()) {
world.showMessage(IMessage.ERROR,
WeaverMessages.format(WeaverMessages.ITD_OVERRIDDEN_STATIC,child,parent),
child.getSourceLocation(),null);
} else if (child.isStatic() && !parent.isStatic()) {
world.showMessage(IMessage.ERROR,
WeaverMessages.format(WeaverMessages.ITD_OVERIDDING_STATIC,child,parent),
child.getSourceLocation(),null);
}
return true;
}
| public boolean checkLegalOverride(ResolvedMember parent, ResolvedMember child) {
//System.err.println("check: " + child.getDeclaringType() + " overrides " + parent.getDeclaringType());
if (Modifier.isFinal(parent.getModifiers())) {
world.showMessage(Message.ERROR,
WeaverMessages.format(WeaverMessages.CANT_OVERRIDE_FINAL_MEMBER,parent),
child.getSourceLocation(),null);
return false;
}
if (!parent.getReturnType().equals(child.getReturnType())) {
world.showMessage(IMessage.ERROR,
WeaverMessages.format(WeaverMessages.ITD_RETURN_TYPE_MISMATCH,parent,child),
child.getSourceLocation(), parent.getSourceLocation());
return false;
}
if (parent.getKind() == Member.POINTCUT) {
UnresolvedType[] pTypes = parent.getParameterTypes();
UnresolvedType[] cTypes = child.getParameterTypes();
if (!Arrays.equals(pTypes, cTypes)) {
world.showMessage(IMessage.ERROR,
WeaverMessages.format(WeaverMessages.ITD_PARAM_TYPE_MISMATCH,parent,child),
child.getSourceLocation(), parent.getSourceLocation());
return false;
}
}
//System.err.println("check: " + child.getModifiers() + " more visible " + parent.getModifiers());
if (isMoreVisible(parent.getModifiers(), child.getModifiers())) {
world.showMessage(IMessage.ERROR,
WeaverMessages.format(WeaverMessages.ITD_VISIBILITY_REDUCTION,parent,child),
child.getSourceLocation(), parent.getSourceLocation());
return false;
}
// check declared exceptions
ResolvedType[] childExceptions = world.resolve(child.getExceptions());
ResolvedType[] parentExceptions = world.resolve(parent.getExceptions());
ResolvedType runtimeException = world.resolve("java.lang.RuntimeException");
ResolvedType error = world.resolve("java.lang.Error");
outer: for (int i=0, leni = childExceptions.length; i < leni; i++) {
//System.err.println("checking: " + childExceptions[i]);
if (runtimeException.isAssignableFrom(childExceptions[i])) continue;
if (error.isAssignableFrom(childExceptions[i])) continue;
for (int j = 0, lenj = parentExceptions.length; j < lenj; j++) {
if (parentExceptions[j].isAssignableFrom(childExceptions[i])) continue outer;
}
// this message is now better handled my MethodVerifier in JDT core.
// world.showMessage(IMessage.ERROR,
// WeaverMessages.format(WeaverMessages.ITD_DOESNT_THROW,childExceptions[i].getName()),
// child.getSourceLocation(), null);
return false;
}
if (parent.isStatic() && !child.isStatic()) {
world.showMessage(IMessage.ERROR,
WeaverMessages.format(WeaverMessages.ITD_OVERRIDDEN_STATIC,child,parent),
child.getSourceLocation(),null);
return false;
} else if (child.isStatic() && !parent.isStatic()) {
world.showMessage(IMessage.ERROR,
WeaverMessages.format(WeaverMessages.ITD_OVERIDDING_STATIC,child,parent),
child.getSourceLocation(),null);
return false;
}
return true;
}
|
diff --git a/src/com/manuelmaly/hn/parser/HNCommentsParser.java b/src/com/manuelmaly/hn/parser/HNCommentsParser.java
index 61930ff..c5cb75f 100644
--- a/src/com/manuelmaly/hn/parser/HNCommentsParser.java
+++ b/src/com/manuelmaly/hn/parser/HNCommentsParser.java
@@ -1,107 +1,107 @@
package com.manuelmaly.hn.parser;
import java.util.ArrayList;
import org.jsoup.nodes.Element;
import org.jsoup.select.Elements;
import com.manuelmaly.hn.App;
import com.manuelmaly.hn.Settings;
import com.manuelmaly.hn.model.HNComment;
import com.manuelmaly.hn.model.HNPostComments;
import com.manuelmaly.hn.util.HNHelper;
public class HNCommentsParser extends BaseHTMLParser<HNPostComments> {
@Override
public HNPostComments parseDocument(Element doc) throws Exception {
if (doc == null)
return new HNPostComments();
ArrayList<HNComment> comments = new ArrayList<HNComment>();
Elements tableRows = doc.select("table tr table tr:has(table)");
String currentUser = Settings.getUserName(App.getInstance());
String text = null;
String author = null;
int level = 0;
String timeAgo = null;
String url = null;
Boolean isDownvoted = false;
String upvoteUrl = null;
String downvoteUrl = null;
boolean endParsing = false;
for (int row = 0; row < tableRows.size(); row++) {
Element mainRowElement = tableRows.get(row).select("td:eq(2)").first();
Element rowLevelElement = tableRows.get(row).select("td:eq(0)").first();
if (mainRowElement == null)
- break;
+ continue;
// The not portion of this query is meant to remove the reply link
// from the text. As far as I can tell that is the only place
// where size=1 is used. If that turns out to not be the case then
// searching for u tags is also a pretty decent option - @jmaltz
text = mainRowElement.select("span.comment > *:not(:has(font[size=1]))").html();
Element comHeadElement = mainRowElement.select("span.comhead").first();
author = comHeadElement.select("a[href*=user]").text();
String timeAgoRaw = getFirstTextValueInElementChildren(comHeadElement);
if (timeAgoRaw.length() > 0)
timeAgo = timeAgoRaw.substring(0, timeAgoRaw.indexOf("|"));
Element urlElement = comHeadElement.select("a[href*=item]").first();
if (urlElement != null)
url = urlElement.attr("href");
String levelSpacerWidth = rowLevelElement.select("img").first().attr("width");
if (levelSpacerWidth != null)
level = Integer.parseInt(levelSpacerWidth) / 40;
Elements voteElements = tableRows.get(row).select("td:eq(1) a");
upvoteUrl = getVoteUrl(voteElements.first(), currentUser);
// We want to test for size because unlike first() calling .get(1)
// Will throw an error if there are not two elements
if (voteElements.size() > 1)
downvoteUrl = getVoteUrl(voteElements.get(1), currentUser);
comments.add(new HNComment(timeAgo, author, url, text, level, isDownvoted, upvoteUrl, downvoteUrl));
if (endParsing)
break;
}
// Just using table:eq(0) would return an extra table, so we use
// get(0) instead, which only returns only the one we want
Element header = doc.select("body table:eq(0) tbody > tr:eq(2) > td:eq(0) > table").get(0);
String headerHtml = null;
// Five table rows is what it takes for the title, post information
// And other boilerplate stuff. More than five means we have something
// Special
if(header.select("tr").size() > 5) {
HeaderParser headerParser = new HeaderParser();
headerHtml = headerParser.parseDocument(header);
}
return new HNPostComments(comments, headerHtml, currentUser);
}
/**
* Parses out the url for voting from a given element
* @param voteElement The element from which to parse out the voting url
* @param currentUser The currently logged in user
* @return The relative url to vote in the given direction for that comment
*/
private String getVoteUrl(Element voteElement, String currentUser) {
if (voteElement != null) {
return voteElement.attr("href").contains(currentUser) ?
HNHelper.resolveRelativeHNURL(voteElement.attr("href")) : null;
}
return null;
}
}
| true | true | public HNPostComments parseDocument(Element doc) throws Exception {
if (doc == null)
return new HNPostComments();
ArrayList<HNComment> comments = new ArrayList<HNComment>();
Elements tableRows = doc.select("table tr table tr:has(table)");
String currentUser = Settings.getUserName(App.getInstance());
String text = null;
String author = null;
int level = 0;
String timeAgo = null;
String url = null;
Boolean isDownvoted = false;
String upvoteUrl = null;
String downvoteUrl = null;
boolean endParsing = false;
for (int row = 0; row < tableRows.size(); row++) {
Element mainRowElement = tableRows.get(row).select("td:eq(2)").first();
Element rowLevelElement = tableRows.get(row).select("td:eq(0)").first();
if (mainRowElement == null)
break;
// The not portion of this query is meant to remove the reply link
// from the text. As far as I can tell that is the only place
// where size=1 is used. If that turns out to not be the case then
// searching for u tags is also a pretty decent option - @jmaltz
text = mainRowElement.select("span.comment > *:not(:has(font[size=1]))").html();
Element comHeadElement = mainRowElement.select("span.comhead").first();
author = comHeadElement.select("a[href*=user]").text();
String timeAgoRaw = getFirstTextValueInElementChildren(comHeadElement);
if (timeAgoRaw.length() > 0)
timeAgo = timeAgoRaw.substring(0, timeAgoRaw.indexOf("|"));
Element urlElement = comHeadElement.select("a[href*=item]").first();
if (urlElement != null)
url = urlElement.attr("href");
String levelSpacerWidth = rowLevelElement.select("img").first().attr("width");
if (levelSpacerWidth != null)
level = Integer.parseInt(levelSpacerWidth) / 40;
Elements voteElements = tableRows.get(row).select("td:eq(1) a");
upvoteUrl = getVoteUrl(voteElements.first(), currentUser);
// We want to test for size because unlike first() calling .get(1)
// Will throw an error if there are not two elements
if (voteElements.size() > 1)
downvoteUrl = getVoteUrl(voteElements.get(1), currentUser);
comments.add(new HNComment(timeAgo, author, url, text, level, isDownvoted, upvoteUrl, downvoteUrl));
if (endParsing)
break;
}
// Just using table:eq(0) would return an extra table, so we use
// get(0) instead, which only returns only the one we want
Element header = doc.select("body table:eq(0) tbody > tr:eq(2) > td:eq(0) > table").get(0);
String headerHtml = null;
// Five table rows is what it takes for the title, post information
// And other boilerplate stuff. More than five means we have something
// Special
if(header.select("tr").size() > 5) {
HeaderParser headerParser = new HeaderParser();
headerHtml = headerParser.parseDocument(header);
}
return new HNPostComments(comments, headerHtml, currentUser);
}
| public HNPostComments parseDocument(Element doc) throws Exception {
if (doc == null)
return new HNPostComments();
ArrayList<HNComment> comments = new ArrayList<HNComment>();
Elements tableRows = doc.select("table tr table tr:has(table)");
String currentUser = Settings.getUserName(App.getInstance());
String text = null;
String author = null;
int level = 0;
String timeAgo = null;
String url = null;
Boolean isDownvoted = false;
String upvoteUrl = null;
String downvoteUrl = null;
boolean endParsing = false;
for (int row = 0; row < tableRows.size(); row++) {
Element mainRowElement = tableRows.get(row).select("td:eq(2)").first();
Element rowLevelElement = tableRows.get(row).select("td:eq(0)").first();
if (mainRowElement == null)
continue;
// The not portion of this query is meant to remove the reply link
// from the text. As far as I can tell that is the only place
// where size=1 is used. If that turns out to not be the case then
// searching for u tags is also a pretty decent option - @jmaltz
text = mainRowElement.select("span.comment > *:not(:has(font[size=1]))").html();
Element comHeadElement = mainRowElement.select("span.comhead").first();
author = comHeadElement.select("a[href*=user]").text();
String timeAgoRaw = getFirstTextValueInElementChildren(comHeadElement);
if (timeAgoRaw.length() > 0)
timeAgo = timeAgoRaw.substring(0, timeAgoRaw.indexOf("|"));
Element urlElement = comHeadElement.select("a[href*=item]").first();
if (urlElement != null)
url = urlElement.attr("href");
String levelSpacerWidth = rowLevelElement.select("img").first().attr("width");
if (levelSpacerWidth != null)
level = Integer.parseInt(levelSpacerWidth) / 40;
Elements voteElements = tableRows.get(row).select("td:eq(1) a");
upvoteUrl = getVoteUrl(voteElements.first(), currentUser);
// We want to test for size because unlike first() calling .get(1)
// Will throw an error if there are not two elements
if (voteElements.size() > 1)
downvoteUrl = getVoteUrl(voteElements.get(1), currentUser);
comments.add(new HNComment(timeAgo, author, url, text, level, isDownvoted, upvoteUrl, downvoteUrl));
if (endParsing)
break;
}
// Just using table:eq(0) would return an extra table, so we use
// get(0) instead, which only returns only the one we want
Element header = doc.select("body table:eq(0) tbody > tr:eq(2) > td:eq(0) > table").get(0);
String headerHtml = null;
// Five table rows is what it takes for the title, post information
// And other boilerplate stuff. More than five means we have something
// Special
if(header.select("tr").size() > 5) {
HeaderParser headerParser = new HeaderParser();
headerHtml = headerParser.parseDocument(header);
}
return new HNPostComments(comments, headerHtml, currentUser);
}
|
diff --git a/main/Main.java b/main/Main.java
index 2592729..2b100bc 100644
--- a/main/Main.java
+++ b/main/Main.java
@@ -1,250 +1,250 @@
package main;
import java.io.BufferedReader;
import java.io.DataInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.StringReader;
import optimization.CascadeSelects;
import optimization.DetectJoins;
import optimization.IOptimization;
import optimization.MoveProjection;
import optimization.MoveSelection;
import parser.gene.ParseException;
import parser.gene.SimpleSQLParser;
import parser.syntaxtree.CompilationUnit;
import parser.visitor.ObjectDepthFirst;
import relationenalgebra.CrossProduct;
import relationenalgebra.IOneChildNode;
import relationenalgebra.ITreeNode;
import relationenalgebra.ITwoChildNode;
import relationenalgebra.Join;
import relationenalgebra.Projection;
import relationenalgebra.Relation;
import relationenalgebra.Selection;
import relationenalgebra.TableOperation;
import database.FileSystemDatabase;
import database.Table;
public class Main {
// Verzeichnis der Buchversandsdatenbank
public static final String KUNDENDB = "db";
public static void main(String[] args) throws IOException,
ClassNotFoundException {
//Logger.debug = true;
//Logger.debug("DEBUGGING IS ENABLED");
Logger.debug("load database");
FileSystemDatabase.getInstance().setDbDirectory(KUNDENDB);
Main.createKundenDB();
Logger.debug("execute sql");
//Main.execute("select B.Titel from Buch_Autor as BA, Buch as B where BA.Autorenname=\"Christian Ullenboom\" and BA.B_ID=B.ID");
//Main.execute("select B.Titel from Buch_Autor as BA, Buch as B where BA.Autorenname=\"Henning Mankell\" and BA.B_ID=B.ID");
//Main.execute("select B.Titel from Buch as B, Kunde as K, Buch_Bestellung as BB, Kunde_Bestellung as KB where K.Name=\"KName1\" and K.ID=KB.K_ID and KB.B_ID=BB.Be_ID and BB.Bu_ID=B.ID");
//Main.readFile("sql.txt");
Main.blockTwoOptimizationDemo();
//Main.printKundenDB();
//FileSystemDatabase.getInstance().persistDb();
}
public static void blockTwoOptimizationDemo() {
String[] queries = new String[]{
"select B.Titel \n" +
"from \n" +
" Buch as B, \n" +
" Kunde as K, \n" +
" Buch_Bestellung as BB,\n" +
" Kunde_Bestellung as KB \n" +
"where \n" +
" K.Name=\"KName1\" and \n" +
" K.ID=KB.K_ID and \n" +
" KB.B_ID=BB.Be_ID and \n" +
" BB.Bu_ID=B.ID",
"select B.ID, K.Name \n" +
"from\n" +
" Bestellung as B, \n" +
" Kunde as K, \n" +
- " Kunde_Bestellung as KB, \n" +
+ " Kunde_Bestellung as KB \n" +
"where \n" +
" KB.K_ID=K.ID and \n" +
" KB.B_ID=B.ID and \n" +
" B.ID=\"Bestellung5\"",
"select Name \n" +
"from \n" +
" Kunde, \n" +
" Kunde_Bestellung \n" +
"where \n" +
" ID=K_ID and \n" +
" Name=\"KName1\"",
};
IOptimization cascadeSelects = new CascadeSelects();
IOptimization detectJoins = new DetectJoins();
IOptimization moveSelection = new MoveSelection();
IOptimization moveProjection = new MoveProjection();
String[] titles = new String[]{
"Result with no optimizations: ",
"Result with cascaded and moved selections: ",
"Result with cascaded and moved selections and detected joins: ",
"Result with cascaded and moved selections, detected joins and moved projections: ",
};
IOptimization[][] optimizationLists = new IOptimization[][]{
new IOptimization[0],
new IOptimization[]{ cascadeSelects, moveSelection },
new IOptimization[]{ cascadeSelects, moveSelection, detectJoins },
new IOptimization[]{ cascadeSelects, moveSelection, detectJoins, moveProjection },
};
for (String query : queries) {
System.out.println("Next query: ");
System.out.println(query);
System.out.println();
for (int i = 0; i < titles.length; i++) {
System.out.println(titles[i]);
Table result = executeOptimized(query, optimizationLists[i]);
if (result == null) {
System.out.println("(no result, optimization failed)");
} else {
System.out.println(result.toString());
}
System.out.println();
}
}
}
private static Table executeOptimized(String query, IOptimization[] optimizations) {
ITreeNode plan = sqlToRelationenAlgebra(query);
if (plan == null) {
System.err.println("failed to parse query");
return null;
}
System.out.println("parsed plan: ");
System.out.println(plan.toString());
for (IOptimization optimization : optimizations) {
try {
plan = optimization.optimize(plan);
} catch (Exception e) {
System.err.println("failed to optimize query using "+optimization);
System.err.println(e.getMessage());
e.printStackTrace();
return null;
}
}
System.out.println("optimized plan:");
System.out.println(plan);
try {
return executeQuery(plan);
} catch (Exception e) {
System.err.println("failed to execute query");
System.err.println(e.getMessage());
e.printStackTrace();
return null;
}
}
public static void printKundenDB() throws IOException,
ClassNotFoundException {
FileSystemDatabase.getInstance().printDb();
}
public static void createKundenDB() {
Logger.debug("create kunden db");
Main.readFile("kundendb.txt");
}
public static void execute(String simpleSQL) {
ITreeNode plan = Main.sqlToRelationenAlgebra(simpleSQL);
Main.executePlan(plan);
}
public static ITreeNode sqlToRelationenAlgebra(String simpleSQL) {
SimpleSQLParser parser = new SimpleSQLParser(
new StringReader(simpleSQL));
parser.setDebugALL(Logger.debug);
Logger.debug("parsing: "+simpleSQL);
CompilationUnit cu = null;
try {
cu = parser.CompilationUnit();
ObjectDepthFirst v = new ObjectDepthFirst();
cu.accept(v, null);
} catch (ParseException e) {
System.err.println(e.getMessage());
return null;
}
return (ITreeNode) cu.accept(new AlgebraVisitor(), null);
}
private static void executePlan(ITreeNode plan) {
if (plan instanceof TableOperation)
((TableOperation) plan).execute();
else {
Logger.debug("QUERY: "+plan);
Table result = executeQuery(plan);
Logger.debug("QUERY RESULT: ");
Logger.debug(result.toString());
}
}
private static Table executeQuery(ITreeNode query) {
if (query instanceof ITwoChildNode) {
Table child1Result = executeQuery(((ITwoChildNode)query).getChild());
Table child2Result = executeQuery(((ITwoChildNode)query).getSecondChild());
if (query instanceof CrossProduct)
return child1Result.cross(child2Result);
if (query instanceof Join)
return child1Result.join(child2Result, ((Join)query).getExpr());
} else if (query instanceof IOneChildNode) {
Table childResult = executeQuery(((IOneChildNode)query).getChild());
if (query instanceof Projection)
return childResult.projectTo(((Projection)query).getColumnnames());
if (query instanceof Selection)
return childResult.select(((Selection)query).getExpr());
} else if (query instanceof Relation) {
Relation r = (Relation)query;
Table t = FileSystemDatabase.getInstance().getTable(r.getName());
t.setAlias(r.getAlias());
return t;
}
throw new IllegalArgumentException("unknown node type: "+query);
}
private static void readFile(String filename) {
File f = new File(filename);
if (!f.isFile())
return;
try {
// Open the file that is the first
// command line parameter
FileInputStream fstream = new FileInputStream(filename);
// Get the object of DataInputStream
DataInputStream in = new DataInputStream(fstream);
BufferedReader br = new BufferedReader(new InputStreamReader(in));
String strLine;
// Read File Line By Line
while ((strLine = br.readLine()) != null) {
// Print the content on the console
if (!strLine.equals("\n") && !strLine.equals(""))
Main.execute(strLine);
}
// Close the input stream
in.close();
} catch (Exception e) {// Catch exception if any
throw new RuntimeException(e);
}
}
}
| true | true | public static void blockTwoOptimizationDemo() {
String[] queries = new String[]{
"select B.Titel \n" +
"from \n" +
" Buch as B, \n" +
" Kunde as K, \n" +
" Buch_Bestellung as BB,\n" +
" Kunde_Bestellung as KB \n" +
"where \n" +
" K.Name=\"KName1\" and \n" +
" K.ID=KB.K_ID and \n" +
" KB.B_ID=BB.Be_ID and \n" +
" BB.Bu_ID=B.ID",
"select B.ID, K.Name \n" +
"from\n" +
" Bestellung as B, \n" +
" Kunde as K, \n" +
" Kunde_Bestellung as KB, \n" +
"where \n" +
" KB.K_ID=K.ID and \n" +
" KB.B_ID=B.ID and \n" +
" B.ID=\"Bestellung5\"",
"select Name \n" +
"from \n" +
" Kunde, \n" +
" Kunde_Bestellung \n" +
"where \n" +
" ID=K_ID and \n" +
" Name=\"KName1\"",
};
IOptimization cascadeSelects = new CascadeSelects();
IOptimization detectJoins = new DetectJoins();
IOptimization moveSelection = new MoveSelection();
IOptimization moveProjection = new MoveProjection();
String[] titles = new String[]{
"Result with no optimizations: ",
"Result with cascaded and moved selections: ",
"Result with cascaded and moved selections and detected joins: ",
"Result with cascaded and moved selections, detected joins and moved projections: ",
};
IOptimization[][] optimizationLists = new IOptimization[][]{
new IOptimization[0],
new IOptimization[]{ cascadeSelects, moveSelection },
new IOptimization[]{ cascadeSelects, moveSelection, detectJoins },
new IOptimization[]{ cascadeSelects, moveSelection, detectJoins, moveProjection },
};
for (String query : queries) {
System.out.println("Next query: ");
System.out.println(query);
System.out.println();
for (int i = 0; i < titles.length; i++) {
System.out.println(titles[i]);
Table result = executeOptimized(query, optimizationLists[i]);
if (result == null) {
System.out.println("(no result, optimization failed)");
} else {
System.out.println(result.toString());
}
System.out.println();
}
}
}
| public static void blockTwoOptimizationDemo() {
String[] queries = new String[]{
"select B.Titel \n" +
"from \n" +
" Buch as B, \n" +
" Kunde as K, \n" +
" Buch_Bestellung as BB,\n" +
" Kunde_Bestellung as KB \n" +
"where \n" +
" K.Name=\"KName1\" and \n" +
" K.ID=KB.K_ID and \n" +
" KB.B_ID=BB.Be_ID and \n" +
" BB.Bu_ID=B.ID",
"select B.ID, K.Name \n" +
"from\n" +
" Bestellung as B, \n" +
" Kunde as K, \n" +
" Kunde_Bestellung as KB \n" +
"where \n" +
" KB.K_ID=K.ID and \n" +
" KB.B_ID=B.ID and \n" +
" B.ID=\"Bestellung5\"",
"select Name \n" +
"from \n" +
" Kunde, \n" +
" Kunde_Bestellung \n" +
"where \n" +
" ID=K_ID and \n" +
" Name=\"KName1\"",
};
IOptimization cascadeSelects = new CascadeSelects();
IOptimization detectJoins = new DetectJoins();
IOptimization moveSelection = new MoveSelection();
IOptimization moveProjection = new MoveProjection();
String[] titles = new String[]{
"Result with no optimizations: ",
"Result with cascaded and moved selections: ",
"Result with cascaded and moved selections and detected joins: ",
"Result with cascaded and moved selections, detected joins and moved projections: ",
};
IOptimization[][] optimizationLists = new IOptimization[][]{
new IOptimization[0],
new IOptimization[]{ cascadeSelects, moveSelection },
new IOptimization[]{ cascadeSelects, moveSelection, detectJoins },
new IOptimization[]{ cascadeSelects, moveSelection, detectJoins, moveProjection },
};
for (String query : queries) {
System.out.println("Next query: ");
System.out.println(query);
System.out.println();
for (int i = 0; i < titles.length; i++) {
System.out.println(titles[i]);
Table result = executeOptimized(query, optimizationLists[i]);
if (result == null) {
System.out.println("(no result, optimization failed)");
} else {
System.out.println(result.toString());
}
System.out.println();
}
}
}
|
diff --git a/grails/src/web/org/codehaus/groovy/grails/web/mapping/filter/UrlMappingsFilter.java b/grails/src/web/org/codehaus/groovy/grails/web/mapping/filter/UrlMappingsFilter.java
index 65662fb9b..c5a6c08ff 100644
--- a/grails/src/web/org/codehaus/groovy/grails/web/mapping/filter/UrlMappingsFilter.java
+++ b/grails/src/web/org/codehaus/groovy/grails/web/mapping/filter/UrlMappingsFilter.java
@@ -1,188 +1,188 @@
/* Copyright 2004-2005 Graeme Rocher
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.codehaus.groovy.grails.web.mapping.filter;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.codehaus.groovy.grails.commons.ControllerArtefactHandler;
import org.codehaus.groovy.grails.commons.GrailsApplication;
import org.codehaus.groovy.grails.commons.GrailsClass;
import org.codehaus.groovy.grails.web.mapping.UrlMappingInfo;
import org.codehaus.groovy.grails.web.mapping.UrlMappingsHolder;
import org.codehaus.groovy.grails.web.servlet.GrailsUrlPathHelper;
import org.codehaus.groovy.grails.web.servlet.WrappedResponseHolder;
import org.codehaus.groovy.grails.web.servlet.GrailsApplicationAttributes;
import org.codehaus.groovy.grails.web.servlet.mvc.GrailsWebRequest;
import org.springframework.web.context.WebApplicationContext;
import org.springframework.web.context.support.WebApplicationContextUtils;
import org.springframework.web.filter.OncePerRequestFilter;
import org.springframework.web.util.UrlPathHelper;
import org.springframework.web.util.WebUtils;
import javax.servlet.FilterChain;
import javax.servlet.RequestDispatcher;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.io.IOException;
/**
* <p>A Servlet filter that uses the Grails UrlMappings to match and forward requests to a relevant controller
* and action
*
* @author Graeme Rocher
* @since 0.5
*
*
* <p/>
* Created: Mar 6, 2007
* Time: 7:58:19 AM
*/
public class UrlMappingsFilter extends OncePerRequestFilter {
private UrlPathHelper urlHelper = new UrlPathHelper();
private static final char SLASH = '/';
private static final Log LOG = LogFactory.getLog(UrlMappingsFilter.class);
protected void doFilterInternal(HttpServletRequest request, HttpServletResponse response, FilterChain filterChain) throws ServletException, IOException {
UrlMappingsHolder holder = lookupUrlMappings();
GrailsApplication application = lookupApplication();
GrailsWebRequest webRequest = (GrailsWebRequest)request.getAttribute(GrailsApplicationAttributes.WEB_REQUEST);
GrailsClass[] controllers = application.getArtefacts(ControllerArtefactHandler.TYPE);
if(controllers == null || controllers.length == 0 || holder == null) {
processFilterChain(request, response, filterChain);
return;
}
if(LOG.isDebugEnabled()) {
LOG.debug("Executing URL mapping filter...");
LOG.debug(holder);
}
String uri = urlHelper.getPathWithinApplication(request);
UrlMappingInfo[] urlInfos = holder.matchAll(uri);
WrappedResponseHolder.setWrappedResponse(response);
boolean dispatched = false;
try {
for (int i = 0; i < urlInfos.length; i++) {
UrlMappingInfo info = urlInfos[i];
if(info!=null) {
String action = info.getActionName() == null ? "" : info.getActionName();
+ info.configure(webRequest);
GrailsClass controller = application.getArtefactForFeature(ControllerArtefactHandler.TYPE, SLASH + info.getControllerName() + SLASH + action);
if(controller == null) {
continue;
}
dispatched = true;
- info.configure(webRequest);
String forwardUrl = buildDispatchUrlForMapping(request, info);
if(LOG.isDebugEnabled()) {
LOG.debug("Matched URI ["+uri+"] to URL mapping ["+info+"], forwarding to ["+forwardUrl+"] with response ["+response.getClass()+"]");
}
//populateParamsForMapping(info);
RequestDispatcher dispatcher = request.getRequestDispatcher(forwardUrl);
populateWebRequestWithInfo(webRequest, info);
WebUtils.exposeForwardRequestAttributes(request);
dispatcher.forward(request, response);
break;
}
}
}
finally {
WrappedResponseHolder.setWrappedResponse(null);
}
if(!dispatched) {
if(LOG.isDebugEnabled()) {
LOG.debug("No match found, processing remaining filter chain.");
}
processFilterChain(request, response, filterChain);
}
}
private void processFilterChain(HttpServletRequest request, HttpServletResponse response, FilterChain filterChain) throws IOException, ServletException {
try {
WrappedResponseHolder.setWrappedResponse(response);
filterChain.doFilter(request,response);
} finally {
WrappedResponseHolder.setWrappedResponse(null);
}
return;
}
protected static void populateWebRequestWithInfo(GrailsWebRequest webRequest, UrlMappingInfo info) {
if(webRequest != null) {
webRequest.setControllerName(info.getControllerName());
webRequest.setActionName(info.getActionName());
String id = info.getId();
if(!StringUtils.isBlank(id))webRequest.getParams().put(GrailsWebRequest.ID_PARAMETER, id);
}
}
/**
* Constructs the URI to forward to using the given request and UrlMappingInfo instance
*
* @param request The HttpServletRequest
* @param info The UrlMappingInfo
* @return The URI to forward to
*/
protected static String buildDispatchUrlForMapping(HttpServletRequest request, UrlMappingInfo info) {
StringBuffer forwardUrl = new StringBuffer(GrailsUrlPathHelper.GRAILS_SERVLET_PATH);
forwardUrl.append(SLASH)
.append(info.getControllerName());
if(!StringUtils.isBlank(info.getActionName())) {
forwardUrl.append(SLASH)
.append(info.getActionName());
}
forwardUrl.append(GrailsUrlPathHelper.GRAILS_DISPATCH_EXTENSION);
return forwardUrl.toString();
}
/**
* Looks up the UrlMappingsHolder instance
*
* @return The UrlMappingsHolder
*/
protected UrlMappingsHolder lookupUrlMappings() {
WebApplicationContext wac =
WebApplicationContextUtils.getRequiredWebApplicationContext(getServletContext());
return (UrlMappingsHolder)wac.getBean(UrlMappingsHolder.BEAN_ID);
}
/**
* Looks up the GrailsApplication instance
*
* @return The GrailsApplication instance
*/
protected GrailsApplication lookupApplication() {
WebApplicationContext wac =
WebApplicationContextUtils.getRequiredWebApplicationContext(getServletContext());
return (GrailsApplication)wac.getBean(GrailsApplication.APPLICATION_ID);
}
}
| false | true | protected void doFilterInternal(HttpServletRequest request, HttpServletResponse response, FilterChain filterChain) throws ServletException, IOException {
UrlMappingsHolder holder = lookupUrlMappings();
GrailsApplication application = lookupApplication();
GrailsWebRequest webRequest = (GrailsWebRequest)request.getAttribute(GrailsApplicationAttributes.WEB_REQUEST);
GrailsClass[] controllers = application.getArtefacts(ControllerArtefactHandler.TYPE);
if(controllers == null || controllers.length == 0 || holder == null) {
processFilterChain(request, response, filterChain);
return;
}
if(LOG.isDebugEnabled()) {
LOG.debug("Executing URL mapping filter...");
LOG.debug(holder);
}
String uri = urlHelper.getPathWithinApplication(request);
UrlMappingInfo[] urlInfos = holder.matchAll(uri);
WrappedResponseHolder.setWrappedResponse(response);
boolean dispatched = false;
try {
for (int i = 0; i < urlInfos.length; i++) {
UrlMappingInfo info = urlInfos[i];
if(info!=null) {
String action = info.getActionName() == null ? "" : info.getActionName();
GrailsClass controller = application.getArtefactForFeature(ControllerArtefactHandler.TYPE, SLASH + info.getControllerName() + SLASH + action);
if(controller == null) {
continue;
}
dispatched = true;
info.configure(webRequest);
String forwardUrl = buildDispatchUrlForMapping(request, info);
if(LOG.isDebugEnabled()) {
LOG.debug("Matched URI ["+uri+"] to URL mapping ["+info+"], forwarding to ["+forwardUrl+"] with response ["+response.getClass()+"]");
}
//populateParamsForMapping(info);
RequestDispatcher dispatcher = request.getRequestDispatcher(forwardUrl);
populateWebRequestWithInfo(webRequest, info);
WebUtils.exposeForwardRequestAttributes(request);
dispatcher.forward(request, response);
break;
}
}
}
finally {
WrappedResponseHolder.setWrappedResponse(null);
}
if(!dispatched) {
if(LOG.isDebugEnabled()) {
LOG.debug("No match found, processing remaining filter chain.");
}
processFilterChain(request, response, filterChain);
}
}
| protected void doFilterInternal(HttpServletRequest request, HttpServletResponse response, FilterChain filterChain) throws ServletException, IOException {
UrlMappingsHolder holder = lookupUrlMappings();
GrailsApplication application = lookupApplication();
GrailsWebRequest webRequest = (GrailsWebRequest)request.getAttribute(GrailsApplicationAttributes.WEB_REQUEST);
GrailsClass[] controllers = application.getArtefacts(ControllerArtefactHandler.TYPE);
if(controllers == null || controllers.length == 0 || holder == null) {
processFilterChain(request, response, filterChain);
return;
}
if(LOG.isDebugEnabled()) {
LOG.debug("Executing URL mapping filter...");
LOG.debug(holder);
}
String uri = urlHelper.getPathWithinApplication(request);
UrlMappingInfo[] urlInfos = holder.matchAll(uri);
WrappedResponseHolder.setWrappedResponse(response);
boolean dispatched = false;
try {
for (int i = 0; i < urlInfos.length; i++) {
UrlMappingInfo info = urlInfos[i];
if(info!=null) {
String action = info.getActionName() == null ? "" : info.getActionName();
info.configure(webRequest);
GrailsClass controller = application.getArtefactForFeature(ControllerArtefactHandler.TYPE, SLASH + info.getControllerName() + SLASH + action);
if(controller == null) {
continue;
}
dispatched = true;
String forwardUrl = buildDispatchUrlForMapping(request, info);
if(LOG.isDebugEnabled()) {
LOG.debug("Matched URI ["+uri+"] to URL mapping ["+info+"], forwarding to ["+forwardUrl+"] with response ["+response.getClass()+"]");
}
//populateParamsForMapping(info);
RequestDispatcher dispatcher = request.getRequestDispatcher(forwardUrl);
populateWebRequestWithInfo(webRequest, info);
WebUtils.exposeForwardRequestAttributes(request);
dispatcher.forward(request, response);
break;
}
}
}
finally {
WrappedResponseHolder.setWrappedResponse(null);
}
if(!dispatched) {
if(LOG.isDebugEnabled()) {
LOG.debug("No match found, processing remaining filter chain.");
}
processFilterChain(request, response, filterChain);
}
}
|
diff --git a/sandbox/pom-pre-alpha-converter/src/main/java/org/apache/maven/tools/converter/Main.java b/sandbox/pom-pre-alpha-converter/src/main/java/org/apache/maven/tools/converter/Main.java
index 5f100e1fd..4b8cfbb02 100755
--- a/sandbox/pom-pre-alpha-converter/src/main/java/org/apache/maven/tools/converter/Main.java
+++ b/sandbox/pom-pre-alpha-converter/src/main/java/org/apache/maven/tools/converter/Main.java
@@ -1,474 +1,480 @@
package org.apache.maven.tools.converter;
/*
* Copyright 2001-2005 The Apache Software Foundation.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import org.apache.maven.model.Notifier;
import org.apache.maven.model.Reports;
import org.apache.maven.model.io.xpp3.MavenXpp3Writer;
import org.apache.maven.model.v4_0_0.Build;
import org.apache.maven.model.v4_0_0.CiManagement;
import org.apache.maven.model.v4_0_0.Contributor;
import org.apache.maven.model.v4_0_0.Dependency;
import org.apache.maven.model.v4_0_0.DependencyManagement;
import org.apache.maven.model.v4_0_0.Developer;
import org.apache.maven.model.v4_0_0.Model;
import org.apache.maven.model.v4_0_0.Plugin;
import org.apache.maven.model.v4_0_0.PluginManagement;
import org.apache.maven.model.v4_0_0.DistributionManagement;
import org.apache.maven.model.v4_0_0.IssueManagement;
import org.apache.maven.model.v4_0_0.License;
import org.apache.maven.model.v4_0_0.MailingList;
import org.apache.maven.model.v4_0_0.Organization;
import org.apache.maven.model.v4_0_0.Parent;
import org.apache.maven.model.v4_0_0.Repository;
import org.apache.maven.model.v4_0_0.Scm;
import org.apache.maven.model.v4_0_0.Site;
import org.apache.maven.model.v4_0_0.io.xpp3.MavenXpp3Reader;
import org.codehaus.plexus.util.FileUtils;
import java.io.File;
import java.io.FileReader;
import java.io.FileWriter;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
/**
* @author <a href="mailto:[email protected]">Brett Porter</a>
* @version $Id$
*/
public class Main
{
public static void main( String[] args )
throws Exception
{
boolean reverse = false;
if ( args.length > 0 && args[0].equals( "-reverse" ) )
{
reverse = true;
}
List files = FileUtils.getFiles( new File( System.getProperty( "user.dir" ) ), "**/pom.xml", "" );
for ( Iterator i = files.iterator(); i.hasNext(); )
{
File file = (File) i.next();
System.out.println( "Processing file: " + file );
File backup = new File( file.getParent(), file.getName() + "~" );
if ( reverse )
{
+ if ( !backup.exists() )
+ {
+ System.err.println( "skipping... backup exists" );
+ continue;
+ }
FileUtils.copyFile( backup, file );
backup.delete();
}
else
{
MavenXpp3Reader reader = new MavenXpp3Reader();
MavenXpp3Writer writer = new MavenXpp3Writer();
try
{
if ( backup.exists() )
{
System.err.println( "skipping... backup exists" );
+ continue;
}
FileReader fileReader = new FileReader( file );
Model model = reader.read( fileReader );
fileReader.close();
org.apache.maven.model.Model newModel = new org.apache.maven.model.Model();
newModel.setArtifactId( model.getArtifactId() );
newModel.setBuild( convertBuild( model.getBuild(), convertPlugins( model.getPlugins() ) ) );
newModel.setCiManagement( convertCiManagement( model.getCiManagement() ) );
newModel.setContributors( convertContributors( model.getContributors() ) );
newModel.setDependencies( convertDependencies( model.getDependencies() ) );
newModel.setDependencyManagement( convertDependencyManagement( model.getDependencyManagement() ) );
newModel.setDescription( model.getDescription() );
newModel.setDevelopers( convertDevelopers( model.getDevelopers() ) );
newModel.setDistributionManagement(
convertDistributionManagement( model.getDistributionManagement() ) );
newModel.setExtend( model.getExtend() );
newModel.setGroupId( model.getGroupId() );
newModel.setInceptionYear( model.getInceptionYear() );
newModel.setIssueManagement( convertIssueManagement( model.getIssueManagement() ) );
newModel.setLicenses( convertLicenses( model.getLicenses() ) );
newModel.setMailingLists( convertMailingLists( model.getMailingLists() ) );
newModel.setModelVersion( model.getModelVersion() );
newModel.setName( model.getName() );
newModel.setOrganization( convertOrganization( model.getOrganization() ) );
newModel.setParent( convertParent( model.getParent() ) );
newModel.setPluginManagement( convertPluginManagement( model.getPluginManagement() ) );
newModel.setPluginRepositories( convertRepositories( model.getPluginRepositories() ) );
newModel.setReports( convertReports( model.getReports() ) );
newModel.setRepositories( convertRepositories( model.getRepositories() ) );
newModel.setScm( convertScm( model.getScm() ) );
newModel.setUrl( model.getUrl() );
newModel.setVersion( model.getVersion() );
FileUtils.copyFile( file, backup );
FileWriter fileWriter = new FileWriter( file );
writer.write( fileWriter, newModel );
fileWriter.close();
}
catch ( Exception e )
{
System.err.println( "Skipping " + file );
if ( args.length > 0 && args[0].equals( "-X" ) )
{
e.printStackTrace();
}
}
}
}
}
private static org.apache.maven.model.Scm convertScm( Scm scm )
{
if ( scm == null )
{
return null;
}
org.apache.maven.model.Scm newScm = new org.apache.maven.model.Scm();
newScm.setConnection( scm.getConnection() );
newScm.setDeveloperConnection( scm.getDeveloperConnection() );
newScm.setUrl( scm.getUrl() );
return newScm;
}
private static Reports convertReports( List reports )
{
if ( reports.isEmpty() )
{
return null;
}
Reports newReports = new Reports();
// newReports.setOutputDirectory( ); -- nothing needed
for ( Iterator i = reports.iterator(); i.hasNext(); )
{
String name = (String) i.next();
org.apache.maven.model.Plugin plugin = new org.apache.maven.model.Plugin();
plugin.setArtifactId( name );
newReports.addPlugin( plugin );
}
return newReports;
}
private static List convertRepositories( List repositories )
{
List newRepositorys = new ArrayList();
for ( Iterator i = repositories.iterator(); i.hasNext(); )
{
newRepositorys.add( convertRepository( (Repository) i.next() ) );
}
return newRepositorys;
}
private static org.apache.maven.model.Repository convertRepository( Repository repository )
{
if ( repository == null )
{
return null;
}
org.apache.maven.model.Repository newRepository = new org.apache.maven.model.Repository();
newRepository.setName( repository.getName() );
newRepository.setId( repository.getId() );
newRepository.setUrl( repository.getUrl() );
return newRepository;
}
private static org.apache.maven.model.Parent convertParent( Parent parent )
{
if ( parent == null )
{
return null;
}
org.apache.maven.model.Parent newParent = new org.apache.maven.model.Parent();
newParent.setArtifactId( parent.getArtifactId() );
newParent.setGroupId( parent.getGroupId() );
newParent.setVersion( parent.getVersion() );
return newParent;
}
private static org.apache.maven.model.Organization convertOrganization( Organization organization )
{
if ( organization == null )
{
return null;
}
org.apache.maven.model.Organization newOrganization = new org.apache.maven.model.Organization();
newOrganization.setName( organization.getName() );
newOrganization.setUrl( organization.getUrl() );
return newOrganization;
}
private static List convertMailingLists( List mailingLists )
{
List newMailinglists = new ArrayList();
for ( Iterator i = mailingLists.iterator(); i.hasNext(); )
{
MailingList mailinglist = (MailingList) i.next();
org.apache.maven.model.MailingList newMailinglist = new org.apache.maven.model.MailingList();
newMailinglist.setName( mailinglist.getName() );
newMailinglist.setArchive( mailinglist.getArchive() );
newMailinglist.setOtherArchives( mailinglist.getOtherArchives() );
newMailinglist.setPost( mailinglist.getPost() );
newMailinglist.setSubscribe( mailinglist.getSubscribe() );
newMailinglist.setUnsubscribe( mailinglist.getUnsubscribe() );
newMailinglists.add( newMailinglist );
}
return newMailinglists;
}
private static List convertLicenses( List licenses )
{
List newLicenses = new ArrayList();
for ( Iterator i = licenses.iterator(); i.hasNext(); )
{
License license = (License) i.next();
org.apache.maven.model.License newLicense = new org.apache.maven.model.License();
newLicense.setComments( license.getComments() );
newLicense.setName( license.getName() );
newLicense.setUrl( license.getUrl() );
newLicenses.add( newLicense );
}
return newLicenses;
}
private static org.apache.maven.model.IssueManagement convertIssueManagement( IssueManagement issueManagement )
{
if ( issueManagement == null )
{
return null;
}
org.apache.maven.model.IssueManagement mgmt = new org.apache.maven.model.IssueManagement();
mgmt.setSystem( issueManagement.getSystem() );
mgmt.setUrl( issueManagement.getUrl() );
return mgmt;
}
private static org.apache.maven.model.DistributionManagement convertDistributionManagement(
DistributionManagement distributionManagement )
{
if ( distributionManagement == null )
{
return null;
}
org.apache.maven.model.DistributionManagement mgmt = new org.apache.maven.model.DistributionManagement();
mgmt.setRepository( convertRepository( distributionManagement.getRepository() ) );
mgmt.setSite( convertSite( distributionManagement.getSite() ) );
return mgmt;
}
private static org.apache.maven.model.Site convertSite( Site site )
{
if ( site == null )
{
return null;
}
org.apache.maven.model.Site newSite = new org.apache.maven.model.Site();
newSite.setId( site.getId() );
newSite.setName( site.getName() );
newSite.setUrl( site.getUrl() );
return newSite;
}
private static org.apache.maven.model.DependencyManagement convertDependencyManagement(
DependencyManagement dependencyManagement )
{
if ( dependencyManagement == null )
{
return null;
}
org.apache.maven.model.DependencyManagement mgmt = new org.apache.maven.model.DependencyManagement();
mgmt.setDependencies( convertDependencies( dependencyManagement.getDependencies() ) );
return mgmt;
}
private static org.apache.maven.model.PluginManagement convertPluginManagement( PluginManagement pluginManagement )
{
if ( pluginManagement == null )
{
return null;
}
org.apache.maven.model.PluginManagement mgmt = new org.apache.maven.model.PluginManagement();
mgmt.setPlugins( convertPlugins( pluginManagement.getPlugins() ) );
return mgmt;
}
private static List convertDependencies( List dependencies )
{
List newDependencys = new ArrayList();
for ( Iterator i = dependencies.iterator(); i.hasNext(); )
{
Dependency dependency = (Dependency) i.next();
org.apache.maven.model.Dependency newDependency = new org.apache.maven.model.Dependency();
newDependency.setArtifactId( dependency.getArtifactId() );
newDependency.setGroupId( dependency.getGroupId() );
newDependency.setScope( dependency.getScope() );
newDependency.setType( dependency.getType() );
newDependency.setVersion( dependency.getVersion() );
newDependencys.add( newDependency );
}
return newDependencys;
}
private static List convertContributors( List contributors )
{
List newContributors = new ArrayList();
for ( Iterator i = contributors.iterator(); i.hasNext(); )
{
Contributor contributor = (Contributor) i.next();
org.apache.maven.model.Contributor newContributor = new org.apache.maven.model.Contributor();
newContributor.setEmail( contributor.getEmail() );
newContributor.setName( contributor.getName() );
newContributor.setOrganization( contributor.getOrganization() );
newContributor.setTimezone( contributor.getTimezone() );
newContributor.setRoles( contributor.getRoles() );
newContributor.setUrl( contributor.getUrl() );
newContributors.add( newContributor );
}
return newContributors;
}
private static List convertDevelopers( List developers )
{
List newDevelopers = new ArrayList();
for ( Iterator i = developers.iterator(); i.hasNext(); )
{
Developer developer = (Developer) i.next();
org.apache.maven.model.Developer newDeveloper = new org.apache.maven.model.Developer();
newDeveloper.setEmail( developer.getEmail() );
newDeveloper.setName( developer.getName() );
newDeveloper.setOrganization( developer.getOrganization() );
newDeveloper.setTimezone( developer.getTimezone() );
newDeveloper.setRoles( developer.getRoles() );
newDeveloper.setUrl( developer.getUrl() );
newDeveloper.setId( developer.getId() );
newDevelopers.add( newDeveloper );
}
return newDevelopers;
}
    /**
     * Converts the continuous-integration section.  When a nag e-mail address
     * is present it is additionally mapped onto an "email" notifier.
     * Returns null when the source POM has no CI section.
     */
    private static org.apache.maven.model.CiManagement convertCiManagement( CiManagement ciManagement )
    {
        if ( ciManagement == null )
        {
            return null;
        }
        org.apache.maven.model.CiManagement newCiManagement = new org.apache.maven.model.CiManagement();
        // NOTE(review): "system" is populated with the nag e-mail address rather
        // than a CI system name -- looks like a copy/paste slip; confirm intent.
        newCiManagement.setSystem( ciManagement.getNagEmailAddress() );
        newCiManagement.setUrl( ciManagement.getUrl() );
        if ( ciManagement.getNagEmailAddress() != null )
        {
            Notifier notifier = new Notifier();
            notifier.setAddress( ciManagement.getNagEmailAddress() );
            notifier.setType( "email" );
            newCiManagement.addNotifier( notifier );
        }
        return newCiManagement;
    }
private static List convertPlugins( List plugins )
{
List newPlugins = new ArrayList();
for ( Iterator i = plugins.iterator(); i.hasNext(); )
{
Plugin plugin = (Plugin) i.next();
org.apache.maven.model.Plugin newPlugin = new org.apache.maven.model.Plugin();
newPlugin.setArtifactId( plugin.getId() );
newPlugin.setConfiguration( plugin.getConfiguration() );
newPlugin.setDisabled( plugin.isDisabled() );
newPlugin.setGoals( plugin.getGoals() );
// newPlugin.setGroupId( "maven" ); -- nothing needed
newPlugins.add( newPlugin );
}
return newPlugins;
}
private static org.apache.maven.model.Build convertBuild( Build build, List plugins )
{
if ( build == null )
{
return null;
}
org.apache.maven.model.Build newBuild = new org.apache.maven.model.Build();
newBuild.setDirectory( build.getDirectory() );
newBuild.setFinalName( build.getFinalName() );
newBuild.setOutputDirectory( build.getOutput() );
newBuild.setPlugins( plugins );
newBuild.setSourceDirectory( build.getSourceDirectory() );
newBuild.setTestOutputDirectory( build.getTestOutput() );
newBuild.setTestSourceDirectory( build.getUnitTestSourceDirectory() );
return newBuild;
}
}
| false | true | public static void main( String[] args )
throws Exception
{
boolean reverse = false;
if ( args.length > 0 && args[0].equals( "-reverse" ) )
{
reverse = true;
}
List files = FileUtils.getFiles( new File( System.getProperty( "user.dir" ) ), "**/pom.xml", "" );
for ( Iterator i = files.iterator(); i.hasNext(); )
{
File file = (File) i.next();
System.out.println( "Processing file: " + file );
File backup = new File( file.getParent(), file.getName() + "~" );
if ( reverse )
{
FileUtils.copyFile( backup, file );
backup.delete();
}
else
{
MavenXpp3Reader reader = new MavenXpp3Reader();
MavenXpp3Writer writer = new MavenXpp3Writer();
try
{
if ( backup.exists() )
{
System.err.println( "skipping... backup exists" );
}
FileReader fileReader = new FileReader( file );
Model model = reader.read( fileReader );
fileReader.close();
org.apache.maven.model.Model newModel = new org.apache.maven.model.Model();
newModel.setArtifactId( model.getArtifactId() );
newModel.setBuild( convertBuild( model.getBuild(), convertPlugins( model.getPlugins() ) ) );
newModel.setCiManagement( convertCiManagement( model.getCiManagement() ) );
newModel.setContributors( convertContributors( model.getContributors() ) );
newModel.setDependencies( convertDependencies( model.getDependencies() ) );
newModel.setDependencyManagement( convertDependencyManagement( model.getDependencyManagement() ) );
newModel.setDescription( model.getDescription() );
newModel.setDevelopers( convertDevelopers( model.getDevelopers() ) );
newModel.setDistributionManagement(
convertDistributionManagement( model.getDistributionManagement() ) );
newModel.setExtend( model.getExtend() );
newModel.setGroupId( model.getGroupId() );
newModel.setInceptionYear( model.getInceptionYear() );
newModel.setIssueManagement( convertIssueManagement( model.getIssueManagement() ) );
newModel.setLicenses( convertLicenses( model.getLicenses() ) );
newModel.setMailingLists( convertMailingLists( model.getMailingLists() ) );
newModel.setModelVersion( model.getModelVersion() );
newModel.setName( model.getName() );
newModel.setOrganization( convertOrganization( model.getOrganization() ) );
newModel.setParent( convertParent( model.getParent() ) );
newModel.setPluginManagement( convertPluginManagement( model.getPluginManagement() ) );
newModel.setPluginRepositories( convertRepositories( model.getPluginRepositories() ) );
newModel.setReports( convertReports( model.getReports() ) );
newModel.setRepositories( convertRepositories( model.getRepositories() ) );
newModel.setScm( convertScm( model.getScm() ) );
newModel.setUrl( model.getUrl() );
newModel.setVersion( model.getVersion() );
FileUtils.copyFile( file, backup );
FileWriter fileWriter = new FileWriter( file );
writer.write( fileWriter, newModel );
fileWriter.close();
}
catch ( Exception e )
{
System.err.println( "Skipping " + file );
if ( args.length > 0 && args[0].equals( "-X" ) )
{
e.printStackTrace();
}
}
}
}
}
| public static void main( String[] args )
throws Exception
{
boolean reverse = false;
if ( args.length > 0 && args[0].equals( "-reverse" ) )
{
reverse = true;
}
List files = FileUtils.getFiles( new File( System.getProperty( "user.dir" ) ), "**/pom.xml", "" );
for ( Iterator i = files.iterator(); i.hasNext(); )
{
File file = (File) i.next();
System.out.println( "Processing file: " + file );
File backup = new File( file.getParent(), file.getName() + "~" );
if ( reverse )
{
if ( !backup.exists() )
{
System.err.println( "skipping... backup exists" );
continue;
}
FileUtils.copyFile( backup, file );
backup.delete();
}
else
{
MavenXpp3Reader reader = new MavenXpp3Reader();
MavenXpp3Writer writer = new MavenXpp3Writer();
try
{
if ( backup.exists() )
{
System.err.println( "skipping... backup exists" );
continue;
}
FileReader fileReader = new FileReader( file );
Model model = reader.read( fileReader );
fileReader.close();
org.apache.maven.model.Model newModel = new org.apache.maven.model.Model();
newModel.setArtifactId( model.getArtifactId() );
newModel.setBuild( convertBuild( model.getBuild(), convertPlugins( model.getPlugins() ) ) );
newModel.setCiManagement( convertCiManagement( model.getCiManagement() ) );
newModel.setContributors( convertContributors( model.getContributors() ) );
newModel.setDependencies( convertDependencies( model.getDependencies() ) );
newModel.setDependencyManagement( convertDependencyManagement( model.getDependencyManagement() ) );
newModel.setDescription( model.getDescription() );
newModel.setDevelopers( convertDevelopers( model.getDevelopers() ) );
newModel.setDistributionManagement(
convertDistributionManagement( model.getDistributionManagement() ) );
newModel.setExtend( model.getExtend() );
newModel.setGroupId( model.getGroupId() );
newModel.setInceptionYear( model.getInceptionYear() );
newModel.setIssueManagement( convertIssueManagement( model.getIssueManagement() ) );
newModel.setLicenses( convertLicenses( model.getLicenses() ) );
newModel.setMailingLists( convertMailingLists( model.getMailingLists() ) );
newModel.setModelVersion( model.getModelVersion() );
newModel.setName( model.getName() );
newModel.setOrganization( convertOrganization( model.getOrganization() ) );
newModel.setParent( convertParent( model.getParent() ) );
newModel.setPluginManagement( convertPluginManagement( model.getPluginManagement() ) );
newModel.setPluginRepositories( convertRepositories( model.getPluginRepositories() ) );
newModel.setReports( convertReports( model.getReports() ) );
newModel.setRepositories( convertRepositories( model.getRepositories() ) );
newModel.setScm( convertScm( model.getScm() ) );
newModel.setUrl( model.getUrl() );
newModel.setVersion( model.getVersion() );
FileUtils.copyFile( file, backup );
FileWriter fileWriter = new FileWriter( file );
writer.write( fileWriter, newModel );
fileWriter.close();
}
catch ( Exception e )
{
System.err.println( "Skipping " + file );
if ( args.length > 0 && args[0].equals( "-X" ) )
{
e.printStackTrace();
}
}
}
}
}
|
diff --git a/araqne-logstorage/src/main/java/org/araqne/logstorage/engine/LogStorageEngine.java b/araqne-logstorage/src/main/java/org/araqne/logstorage/engine/LogStorageEngine.java
index ce9d9e27..0ab547e3 100644
--- a/araqne-logstorage/src/main/java/org/araqne/logstorage/engine/LogStorageEngine.java
+++ b/araqne-logstorage/src/main/java/org/araqne/logstorage/engine/LogStorageEngine.java
@@ -1,1565 +1,1566 @@
/*
* Copyright 2010 NCHOVY
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.araqne.logstorage.engine;
import java.io.IOException;
import java.io.SyncFailedException;
import java.nio.BufferUnderflowException;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Calendar;
import java.util.Collection;
import java.util.Collections;
import java.util.ConcurrentModificationException;
import java.util.Date;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.ListIterator;
import java.util.Map;
import java.util.Map.Entry;
import java.util.NoSuchElementException;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.CopyOnWriteArraySet;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.locks.Lock;
import org.apache.felix.ipojo.annotations.Component;
import org.apache.felix.ipojo.annotations.Invalidate;
import org.apache.felix.ipojo.annotations.Provides;
import org.apache.felix.ipojo.annotations.Requires;
import org.apache.felix.ipojo.annotations.Unbind;
import org.apache.felix.ipojo.annotations.Validate;
import org.araqne.api.PrimitiveConverter;
import org.araqne.confdb.Config;
import org.araqne.confdb.ConfigDatabase;
import org.araqne.confdb.ConfigService;
import org.araqne.confdb.Predicates;
import org.araqne.log.api.LogParser;
import org.araqne.log.api.LogParserBugException;
import org.araqne.log.api.LogParserBuilder;
import org.araqne.logstorage.CachedRandomSeeker;
import org.araqne.logstorage.CallbackSet;
import org.araqne.logstorage.DateUtil;
import org.araqne.logstorage.LockKey;
import org.araqne.logstorage.LockStatus;
import org.araqne.logstorage.Log;
import org.araqne.logstorage.LogCallback;
import org.araqne.logstorage.LogCursor;
import org.araqne.logstorage.LogFileService;
import org.araqne.logstorage.LogFileServiceEventListener;
import org.araqne.logstorage.LogFileServiceRegistry;
import org.araqne.logstorage.LogMarshaler;
import org.araqne.logstorage.LogRetentionPolicy;
import org.araqne.logstorage.LogStorage;
import org.araqne.logstorage.LogStorageEventListener;
import org.araqne.logstorage.LogStorageStatus;
import org.araqne.logstorage.LogTableRegistry;
import org.araqne.logstorage.LogTraverseCallback;
import org.araqne.logstorage.LogWriterStatus;
import org.araqne.logstorage.ReplicaStorageConfig;
import org.araqne.logstorage.ReplicationMode;
import org.araqne.logstorage.SimpleLogTraverseCallback;
import org.araqne.logstorage.TableEventListener;
import org.araqne.logstorage.TableLock;
import org.araqne.logstorage.TableNotFoundException;
import org.araqne.logstorage.TableScanRequest;
import org.araqne.logstorage.TableSchema;
import org.araqne.logstorage.UnsupportedLogFileTypeException;
import org.araqne.logstorage.WriteFallback;
import org.araqne.logstorage.WriterPreparationException;
import org.araqne.logstorage.file.DatapathUtil;
import org.araqne.logstorage.file.LogFileReader;
import org.araqne.logstorage.file.LogFileServiceV2;
import org.araqne.logstorage.file.LogFileWriter;
import org.araqne.logstorage.file.LogRecordCursor;
import org.araqne.storage.api.FilePath;
import org.araqne.storage.api.StorageManager;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@Component(name = "logstorage-engine")
@Provides
public class LogStorageEngine implements LogStorage, TableEventListener, LogFileServiceEventListener {
private static final String DEFAULT_LOGFILETYPE = "v2";
private final Logger logger = LoggerFactory.getLogger(LogStorageEngine.class.getName());
private static final int DEFAULT_LOG_CHECK_INTERVAL = 1000;
private static final int DEFAULT_MAX_IDLE_TIME = 600000; // 10min
private static final int DEFAULT_LOG_FLUSH_INTERVAL = 60000; // 60sec
private LogStorageStatus status = LogStorageStatus.Closed;
@Requires
private LogTableRegistry tableRegistry;
@Requires
private ConfigService conf;
@Requires
private LogFileServiceRegistry lfsRegistry;
@Requires
private StorageManager storageManager;
// online writers
private ConcurrentMap<OnlineWriterKey, OnlineWriter> onlineWriters;
private ConcurrentMap<OnlineWriterKey, AtomicLong> lastIds;
private CopyOnWriteArraySet<LogCallback> callbacks;
private CallbackSet callbackSet;
private CallbackSet fallbackSet;
// sweeping and flushing data
private WriterSweeper writerSweeper;
private Thread writerSweeperThread;
private LogFileFetcher fetcher;
private FilePath logDir;
private ConcurrentHashMap<String, Integer> tableNameCache;
// private CopyOnWriteArraySet<LogStorageEventListener> listeners;
	@Unbind
	public void unbind(LogFileServiceRegistry reg) {
		// iPOJO callback: invoked when the LogFileServiceRegistry service goes away.
		logger.info("log file service registry unbinded");
	}
	/**
	 * Builds the engine: reads sweeper intervals from confdb, allocates the
	 * writer/callback bookkeeping structures and resolves the log directory.
	 */
	public LogStorageEngine() {
		// writer housekeeping intervals, overridable through confdb parameters
		int checkInterval = getIntParameter(Constants.LogCheckInterval, DEFAULT_LOG_CHECK_INTERVAL);
		int maxIdleTime = getIntParameter(Constants.LogMaxIdleTime, DEFAULT_MAX_IDLE_TIME);
		int flushInterval = getIntParameter(Constants.LogFlushInterval, DEFAULT_LOG_FLUSH_INTERVAL);
		onlineWriters = new ConcurrentHashMap<OnlineWriterKey, OnlineWriter>();
		lastIds = new ConcurrentHashMap<OnlineWriterKey, AtomicLong>();
		writerSweeper = new WriterSweeper(checkInterval, maxIdleTime, flushInterval);
		callbacks = new CopyOnWriteArraySet<LogCallback>();
		fallbackSet = new CallbackSet();
		callbackSet = new CallbackSet();
		tableNameCache = new ConcurrentHashMap<String, Integer>();
		// default log dir under araqne.data.dir, overridable via config.
		// NOTE(review): start() repeats this resolution -- confirm the duplication is intentional.
		logDir = storageManager.resolveFilePath(System.getProperty("araqne.data.dir")).newFilePath("araqne-logstorage/log");
		logDir = storageManager.resolveFilePath(getStringParameter(Constants.LogStorageDirectory, logDir.getAbsolutePath()));
		logDir.mkdirs();
		DatapathUtil.setLogDir(logDir);
		// listeners = new CopyOnWriteArraySet<LogStorageEventListener>();
	}
	@Override
	public FilePath getDirectory() {
		// Base directory under which per-table log files are kept.
		return logDir;
	}
@Override
public void setDirectory(FilePath f) {
if (f == null)
throw new IllegalArgumentException("storage path should be not null");
if (!f.isDirectory())
throw new IllegalArgumentException("storage path should be directory");
ConfigUtil.set(conf, Constants.LogStorageDirectory, f.getAbsolutePath());
logDir = f;
DatapathUtil.setLogDir(logDir);
}
private String getStringParameter(Constants key, String defaultValue) {
String value = ConfigUtil.get(conf, key);
if (value != null)
return value;
return defaultValue;
}
private int getIntParameter(Constants key, int defaultValue) {
String value = ConfigUtil.get(conf, key);
if (value != null)
return Integer.valueOf(value);
return defaultValue;
}
	@Override
	public LogStorageStatus getStatus() {
		// Lifecycle state: Closed -> Starting -> Open -> Stopping -> Closed
		// (transitions happen in start() and stop()).
		return status;
	}
	/**
	 * Starts the engine: resolves the log directory, spawns the writer sweeper
	 * thread, primes the table-name cache and registers listeners.  Only legal
	 * from the Closed state.
	 */
	@Validate
	@Override
	public void start() {
		// explicit LogStorageDirectory config wins over the araqne.data.dir default
		FilePath sysArgLogDir = storageManager.resolveFilePath(System.getProperty("araqne.data.dir")).newFilePath(
				"araqne-logstorage/log");
		logDir = storageManager
				.resolveFilePath(getStringParameter(Constants.LogStorageDirectory, sysArgLogDir.getAbsolutePath()));
		logDir.mkdirs();
		DatapathUtil.setLogDir(logDir);
		if (status != LogStorageStatus.Closed)
			throw new IllegalStateException("log archive already started");
		status = LogStorageStatus.Starting;
		fetcher = new LogFileFetcher(tableRegistry, lfsRegistry, storageManager);
		// background thread that flushes and retires idle online writers
		writerSweeperThread = new Thread(writerSweeper, "LogStorage LogWriter Sweeper");
		writerSweeperThread.start();
		// load table name cache
		tableNameCache.clear();
		for (TableSchema schema : tableRegistry.getTableSchemas()) {
			tableNameCache.put(schema.getName(), schema.getId());
		}
		tableRegistry.addListener(this);
		lfsRegistry.addListener(this);
		status = LogStorageStatus.Open;
	}
	/**
	 * Stops the engine: detaches listeners, signals the writer sweeper to
	 * finish, closes every online writer and marks the engine Closed.  Only
	 * legal from the Open state.
	 */
	@Invalidate
	@Override
	public void stop() {
		if (status != LogStorageStatus.Open)
			throw new IllegalStateException("log archive already stopped");
		status = LogStorageStatus.Stopping;
		try {
			if (tableRegistry != null) {
				tableRegistry.removeListener(this);
			}
		} catch (IllegalStateException e) {
			// iPOJO may already have torn down the registry proxy during shutdown;
			// only that specific failure is tolerated here.
			if (!e.getMessage().contains("Cannot create the Nullable Object"))
				throw e;
		}
		writerSweeper.doStop = true;
		synchronized (writerSweeper) {
			writerSweeper.notifyAll();
		}
		// wait writer sweeper stop (up to 25 x 200ms = 5 seconds)
		try {
			for (int i = 0; i < 25; i++) {
				if (writerSweeper.isStopped)
					break;
				Thread.sleep(200);
			}
		} catch (InterruptedException e) {
		}
		// close all writers; individual failures are logged but do not abort shutdown
		for (OnlineWriterKey key : onlineWriters.keySet()) {
			try {
				OnlineWriter writer = onlineWriters.get(key);
				if (writer != null)
					writer.close();
			} catch (Throwable t) {
				logger.warn("exception caught", t);
			}
		}
		onlineWriters.clear();
		lastIds.clear();
		lfsRegistry.removeListener(this);
		status = LogStorageStatus.Closed;
	}
	@Override
	public void createTable(TableSchema schema) {
		// Table metadata is owned by the registry; the engine only delegates.
		tableRegistry.createTable(schema);
	}
@Override
public void ensureTable(TableSchema schema) {
if (!tableRegistry.exists(schema.getName()))
createTable(schema);
}
	@Override
	public void alterTable(String tableName, TableSchema schema) {
		// Schema changes are owned by the registry; the engine only delegates.
		tableRegistry.alterTable(tableName, schema);
	}
	/**
	 * Drops a table: removes its retention policy and registry metadata,
	 * evicts any online writers for its partitions, then deletes the table's
	 * data files.  Metadata removal and file deletion run under the table's
	 * exclusive lock.
	 */
	@Override
	public void dropTable(String tableName) {
		TableSchema schema = tableRegistry.getTableSchema(tableName, true);
		int tableId = schema.getId();
		Collection<Date> dates = getLogDates(tableName);
		// drop retention policy
		ConfigDatabase db = conf.ensureDatabase("araqne-logstorage");
		Config c = db.findOne(LogRetentionPolicy.class, Predicates.field("table_name", tableName));
		if (c != null)
			c.remove();
		Lock tLock = tableRegistry.getExclusiveTableLock(tableName, "engine", "dropTable");
		try {
			tLock.lock();
			// drop table metadata
			tableRegistry.dropTable(tableName);
			// evict online writers so no partition file stays open
			for (Date day : dates) {
				OnlineWriterKey key = new OnlineWriterKey(tableName, day, tableId);
				OnlineWriter writer = onlineWriters.get(key);
				if (writer != null) {
					writer.close();
					if (logger.isTraceEnabled())
						logger.trace("araqne logstorage: removing logger [{}] according to table drop", key);
					onlineWriters.remove(key);
				}
			}
			// purge existing files
			FilePath tableDir = getTableDirectory(schema);
			if (!tableDir.exists())
				return;
			// delete all .idx, .dat, .key files
			for (FilePath f : tableDir.listFiles()) {
				String name = f.getName();
				if (f.isFile() && (name.endsWith(".idx") || name.endsWith(".dat") || name.endsWith(".key"))) {
					ensureDelete(f);
				}
			}
			// delete directory if empty
			if (tableDir.listFiles().length == 0) {
				logger.info("araqne logstorage: deleted table {} directory", tableName);
				tableDir.delete();
			}
		} finally {
			if (tLock != null)
				tLock.unlock();
		}
	}
@Override
public LogRetentionPolicy getRetentionPolicy(String tableName) {
ConfigDatabase db = conf.ensureDatabase("araqne-logstorage");
Config c = db.findOne(LogRetentionPolicy.class, Predicates.field("table_name", tableName));
if (c == null)
return null;
return c.getDocument(LogRetentionPolicy.class);
}
@Override
public void setRetentionPolicy(LogRetentionPolicy policy) {
if (policy == null)
throw new IllegalArgumentException("policy should not be null");
ConfigDatabase db = conf.ensureDatabase("araqne-logstorage");
Config c = db.findOne(LogRetentionPolicy.class, Predicates.field("table_name", policy.getTableName()));
if (c == null) {
db.add(policy);
} else {
c.setDocument(PrimitiveConverter.serialize(policy));
c.update();
}
}
@Override
public FilePath getTableDirectory(String tableName) {
if (!tableRegistry.exists(tableName))
throw new IllegalArgumentException("table not exists: " + tableName);
TableSchema schema = tableRegistry.getTableSchema(tableName, true);
return getTableDirectory(schema);
}
private FilePath getTableDirectory(TableSchema schema) {
FilePath baseDir = logDir;
if (schema.getPrimaryStorage().getBasePath() != null)
baseDir = storageManager.resolveFilePath(schema.getPrimaryStorage().getBasePath());
if (baseDir == null)
return null;
return baseDir.newFilePath(Integer.toString(schema.getId()));
}
@Override
public Collection<Date> getLogDates(String tableName) {
TableSchema schema = tableRegistry.getTableSchema(tableName, true);
String storageType = schema.getPrimaryStorage().getType();
LogFileService lfs = lfsRegistry.getLogFileService(storageType);
if (lfs == null)
throw new UnsupportedLogFileTypeException(storageType);
return lfs.getPartitions(tableName);
}
@Override
public Collection<Date> getLogDates(String tableName, Date from, Date to) {
List<Date> l = new ArrayList<Date>();
for (Date d : getLogDates(tableName)) {
if (from != null && d.before(from))
continue;
if (to != null && d.after(to))
continue;
l.add(d);
}
Collections.sort(l, Collections.reverseOrder());
return l;
}
	/**
	 * Writes a single log under the table's shared lock, retrying once when
	 * the online writer was concurrently closed or not yet prepared.
	 *
	 * @return true when the log was persisted (or taken over by a registered
	 *         WriteFallback), false when the lock could not be acquired
	 * @throws InterruptedException when interrupted while waiting for the lock
	 */
	@Override
	public boolean tryWrite(Log log, long waitFor, TimeUnit tu) throws InterruptedException {
		// inlined verify() for fast-path performance
		if (status != LogStorageStatus.Open)
			throw new IllegalStateException("archive not opened");
		// write data
		String tableName = log.getTableName();
		int tryCnt = 0;
		int tryCntLimit = 2;
		for (; tryCnt < tryCntLimit; tryCnt++) {
			TableLock tl = tableRegistry.getSharedTableLock(tableName);
			BackOffLock bol = new BackOffLock(tl);
			try {
				do {
					boolean locked = bol.tryLock();
					if (locked) {
						OnlineWriter writer = loadOnlineWriter(tableName, log.getDate());
						writer.write(log);
						break;
					} else {
						// lock contention: let registered fallbacks absorb the write
						if (callWriteFallback(Arrays.asList(log), tl))
							return true;
					}
				} while (!bol.isDone());
				if (!bol.hasLocked())
					return false;
				else
					break;
			} catch (WriterPreparationException ex) {
				// writer preparation failed; retry once
				continue;
			} catch (TimeoutException ex) {
				throw new IllegalStateException(ex);
			} catch (IOException e) {
				// a swept/closed writer is retried once; anything else is fatal
				if (e.getMessage().contains("closed")) {
					logger.info("closed online writer: trying one more time");
					continue;
				}
				throw new IllegalStateException("cannot write log: " + tableName + ", " + log.getDate());
			} finally {
				bol.unlock();
			}
		}
		if (tryCnt == tryCntLimit) {
			throw new IllegalStateException("cannot write [1] logs to table [" + tableName + "]: retry count exceeded");
		}
		// id is assigned on successful write; 0 means the write silently failed
		if (log.getId() == 0)
			throw new IllegalStateException("cannot write log: " + tableName + ", " + log.getDate());
		// invoke log callbacks
		List<Log> one = Arrays.asList(log);
		invokeLogCallbacks(log.getTableName(), one);
		return true;
	}
private boolean callWriteFallback(List<Log> logs, TableLock tl) {
    // Offer the batch to each registered fallback; the first one that
    // handles the entire batch wins.
    for (WriteFallback fallback : fallbackSet.get(WriteFallback.class)) {
        if (fallback.onLockFailure(tl, logs) == logs.size())
            return true;
    }
    return false;
}
@Override
public boolean tryWrite(List<Log> logs, long waitFor, TimeUnit tu) throws InterruptedException {
    // Batch write: logs are grouped by (table, day) so each group maps to a
    // single online writer; groups whose table lock cannot be obtained are
    // offered to the write fallbacks instead.
    // inlined verify() for fast-path performance
    if (status != LogStorageStatus.Open)
        throw new IllegalStateException("archive not opened");

    HashMap<OnlineWriterKey, List<Log>> keyLogs = new HashMap<OnlineWriterKey, List<Log>>();
    for (Log log : logs) {
        Integer tableId = tableNameCache.get(log.getTableName());
        if (tableId == null)
            throw new TableNotFoundException(log.getTableName());

        OnlineWriterKey writerKey = new OnlineWriterKey(log.getTableName(), log.getDay(), tableId);
        List<Log> l = keyLogs.get(writerKey);
        if (l == null) {
            l = new ArrayList<Log>();
            keyLogs.put(writerKey, l);
        }
        l.add(log);
    }

    HashMap<OnlineWriterKey, BackOffLock> locks = new HashMap<OnlineWriterKey, BackOffLock>();
    try {
        // acquire one shared table lock per group
        for (OnlineWriterKey k : keyLogs.keySet()) {
            TableLock lock = tableRegistry.getSharedTableLock(k.getTableName());
            BackOffLock bol = new BackOffLock(lock, waitFor, tu);
            do {
                boolean locked = bol.tryLock();
                if (locked) {
                    locks.put(k, bol);
                } else {
                    if (callWriteFallback(keyLogs.get(k), lock))
                        break;
                }
            } while (!bol.isDone());
        }

        // write data (only for groups whose lock was obtained)
        for (Entry<OnlineWriterKey, List<Log>> e : keyLogs.entrySet()) {
            OnlineWriterKey writerKey = e.getKey();
            String tableName = writerKey.getTableName();
            List<Log> l = e.getValue();
            if (!locks.containsKey(writerKey))
                continue;

            int tryCnt = 0;
            int tryCntLimit = 2;
            for (; tryCnt < tryCntLimit; tryCnt++) {
                try {
                    OnlineWriter writer = loadOnlineWriter(writerKey.getTableName(), writerKey.getDay());
                    writer.write(l);
                    break;
                } catch (WriterPreparationException ex) {
                    logger.debug("WriterPreparationException", ex);
                    // retry
                } catch (TimeoutException ex) {
                    throw new IllegalStateException("cannot write [" + l.size() + "] logs to table [" + tableName + "]", ex);
                } catch (InterruptedException ex) {
                    throw new IllegalStateException("cannot write [" + l.size() + "] logs to table [" + tableName + "]", ex);
                } catch (IOException ex) {
                    // a concurrently closed writer is retriable.
                    // FIX: getMessage() may be null (old code NPE'd here).
                    String msg = ex.getMessage();
                    if (msg != null && msg.contains("closed")) {
                        logger.info("araqne logstorage: closed online writer, trying one more time");
                        continue;
                    }
                    throw new IllegalStateException("cannot write [" + l.size() + "] logs to table [" + tableName + "]", ex);
                }
            }
            if (tryCnt == tryCntLimit) {
                logger.info("tryCnt == tryCntLimit");
                throw new IllegalStateException("cannot write [" + l.size() + "] logs to table [" + tableName
                        + "]: retry count exceeded");
            }
            invokeLogCallbacks(writerKey.getTableName(), l);
        }
    } catch (RuntimeException e) {
        logger.error("unexpected exception", e);
        throw e;
    } finally {
        for (BackOffLock l : locks.values()) {
            l.unlock();
        }
    }
    return true;
}
private void invokeLogCallbacks(String tableName, List<Log> l) {
    // Notify listeners of a committed batch; a misbehaving callback must not
    // break the write path, so exceptions are only logged.
    for (LogCallback cb : callbacks) {
        try {
            cb.onLogBatch(tableName, l);
        } catch (Exception ex) {
            logger.warn("araqne logstorage: log callback should not throw any exception", ex);
        }
    }
}
@Override
public Collection<Log> getLogs(String tableName, Date from, Date to, int limit) {
    // Convenience overload: scan with no offset.
    return getLogs(tableName, from, to, 0, limit);
}
@Override
public Collection<Log> getLogs(String tableName, Date from, Date to, long offset, int limit) {
    // Collect up to 'limit' logs after skipping 'offset' via a table scan.
    final List<Log> ret = new ArrayList<Log>(limit);
    try {
        LogTraverseCallback.Sink listSink = new LogTraverseCallback.Sink(offset, limit) {
            @Override
            protected void processLogs(List<Log> logs) {
                ret.addAll(logs);
            }
        };
        search(new TableScanRequest(tableName, from, to, null, new SimpleLogTraverseCallback(listSink)));
    } catch (InterruptedException e) {
        // FIX: restore the interrupt flag and preserve the cause instead of
        // silently discarding both
        Thread.currentThread().interrupt();
        throw new RuntimeException("interrupted", e);
    }
    return ret;
}
@Override
public Date getPurgeBaseline(String tableName) {
    // Baseline = midnight of (newest partition day, capped at now) minus the
    // retention window. null means nothing should be purged.
    LogRetentionPolicy policy = getRetentionPolicy(tableName);
    if (policy == null || policy.getRetentionDays() == 0)
        return null;

    Date lastLogDay = getMaxDay(getLogDates(tableName).iterator());
    if (lastLogDay == null)
        return null;

    Date now = new Date();
    if (lastLogDay.after(now))
        lastLogDay = now;
    return getBaseline(lastLogDay, policy.getRetentionDays());
}
private Date getBaseline(Date lastDay, int days) {
    // Midnight of (lastDay - days).
    Calendar cal = Calendar.getInstance();
    cal.setTime(lastDay);
    cal.add(Calendar.DAY_OF_MONTH, -days);
    cal.set(Calendar.HOUR_OF_DAY, 0);
    cal.set(Calendar.MINUTE, 0);
    cal.set(Calendar.SECOND, 0);
    cal.set(Calendar.MILLISECOND, 0);
    return cal.getTime();
}
private Date getMaxDay(Iterator<Date> days) {
    // Return the latest day, or null for an empty iterator.
    // FIX: collapsed the redundant `max != null` in the else-if branch.
    Date max = null;
    while (days.hasNext()) {
        Date day = days.next();
        if (max == null || day.after(max))
            max = day;
    }
    return max;
}
@Override
public void purge(String tableName, Date day) {
    // Default purge: keep the replica-config safety check enabled.
    purge(tableName, day, false);
}
@Override
public void purge(String tableName, Date day, boolean skipArgCheck) {
    // Delete one day's partition files (.idx/.dat) for a table, closing and
    // evicting its online writer first so no handle keeps the files open.
    SimpleDateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd");
    TableSchema schema = tableRegistry.getTableSchema(tableName, true);
    // NOTE(review): getTableDirectory() can return null when no base dir
    // resolves; dir.newFilePath() below would then NPE — confirm callers
    // guarantee a resolvable directory.
    FilePath dir = getTableDirectory(tableName);

    if (!skipArgCheck) {
        // purging a non-active replica would diverge it from its source
        ReplicaStorageConfig config = ReplicaStorageConfig.parseTableSchema(schema);
        if (config != null && config.mode() != ReplicationMode.ACTIVE)
            throw new IllegalArgumentException("specified table has replica storage config and cannot purge non-active table");
    }

    // evict online buffer and close
    OnlineWriter writer = onlineWriters.remove(new OnlineWriterKey(tableName, day, schema.getId()));
    if (writer != null)
        writer.close();

    String fileName = dateFormat.format(day);
    FilePath idxFile = dir.newFilePath(fileName + ".idx");
    FilePath datFile = dir.newFilePath(fileName + ".dat");

    // notify listeners before deleting; listener failures must not abort the purge
    for (LogStorageEventListener listener : callbackSet.get(LogStorageEventListener.class)) {
        try {
            listener.onPurge(tableName, day);
        } catch (Throwable t) {
            logger.error("araqne logstorage: storage event listener should not throw any exception", t);
        }
    }

    logger.debug("araqne logstorage: try to purge log data of table [{}], day [{}]", tableName, fileName);
    ensureDelete(idxFile);
    ensureDelete(datFile);
}
@SuppressWarnings("unchecked")
public static <T> CopyOnWriteArraySet<T> getCallbacks(ConcurrentMap<Class<?>, CopyOnWriteArraySet<?>> callbackSets,
        Class<T> class1) {
    // Lazily create the per-type callback set; putIfAbsent resolves the race
    // between concurrent first callers, keeping exactly one winner.
    CopyOnWriteArraySet<?> existing = callbackSets.get(class1);
    if (existing != null)
        return (CopyOnWriteArraySet<T>) existing;

    CopyOnWriteArraySet<T> fresh = new CopyOnWriteArraySet<T>();
    CopyOnWriteArraySet<?> winner = callbackSets.putIfAbsent(class1, fresh);
    return (CopyOnWriteArraySet<T>) (winner != null ? winner : fresh);
}
@Override
public void purge(String tableName, Date fromDay, Date toDay) {
    // Purge every partition day inside [fromDay, toDay]; a null bound means
    // unbounded on that side.
    SimpleDateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd");
    String from = (fromDay == null) ? "unbound" : dateFormat.format(fromDay);
    String to = (toDay == null) ? "unbound" : dateFormat.format(toDay);
    logger.debug("araqne logstorage: try to purge log data of table [{}], range [{}~{}]",
            new Object[] { tableName, from, to });

    // collect matching days first, then purge one by one
    List<Date> targets = new ArrayList<Date>();
    for (Date day : getLogDates(tableName)) {
        boolean outOfRange = (fromDay != null && day.before(fromDay)) || (toDay != null && day.after(toDay));
        if (!outOfRange)
            targets.add(day);
    }

    for (Date day : targets)
        purge(tableName, day);
}
private boolean ensureDelete(FilePath f) {
    // Retry deletion for up to 30s (e.g. transient handle contention on
    // Windows). NOTE(review): this is a tight busy-wait with no backoff —
    // it will spin a core for the full timeout on a stuck file.
    final int MAX_TIMEOUT = 30000;
    long begin = System.currentTimeMillis();

    while (true) {
        // treat "already gone" the same as a successful delete
        if (!f.exists() || f.delete()) {
            if (logger.isTraceEnabled())
                logger.trace("araqne logstorage: deleted log file [{}]", f.getAbsolutePath());
            return true;
        }
        if (System.currentTimeMillis() - begin > MAX_TIMEOUT) {
            logger.error("araqne logstorage: delete timeout, cannot delete log file [{}]", f.getAbsolutePath());
            return false;
        }
    }
}
@Override
public CachedRandomSeeker openCachedRandomSeeker() {
    verify();

    // Flush every online writer so the seeker observes all data written so
    // far; sync failures are logged but do not abort the open.
    for (OnlineWriter writer : onlineWriters.values()) {
        try {
            writer.sync();
        } catch (IOException e) {
            logger.error("araqne logstorage: cannot sync online writer", e);
        }
    }

    return new CachedRandomSeekerImpl(tableRegistry, fetcher, onlineWriters, logDir);
}
@Override
public LogCursor openCursor(String tableName, Date day, boolean ascending) throws IOException {
    // Open a cursor over one day's partition, merging the flushed file data
    // with the online writer's in-memory buffer (if any).
    verify();

    Integer tableId = tableNameCache.get(tableName);
    if (tableId == null)
        throw new TableNotFoundException(tableName);

    OnlineWriter onlineWriter = onlineWriters.get(new OnlineWriterKey(tableName, day, tableId));
    ArrayList<Log> buffer = null;
    if (onlineWriter != null)
        // NOTE(review): unchecked downcast — assumes getBuffer() actually
        // returns an ArrayList; confirm against OnlineWriter.
        buffer = (ArrayList<Log>) onlineWriter.getBuffer();

    TableSchema schema = tableRegistry.getTableSchema(tableName, true);
    String basePathString = schema.getPrimaryStorage().getBasePath();
    FilePath basePath = logDir;
    if (basePathString != null)
        basePath = storageManager.resolveFilePath(basePathString);

    FilePath indexPath = DatapathUtil.getIndexFile(tableId, day, basePath);
    FilePath dataPath = DatapathUtil.getDataFile(tableId, day, basePath);
    FilePath keyPath = DatapathUtil.getKeyFile(tableId, day, basePath);

    String logFileType = schema.getPrimaryStorage().getType();
    LogFileServiceV2.Option options = new LogFileServiceV2.Option(schema.getPrimaryStorage(), schema.getMetadata(),
            tableName, basePath, indexPath, dataPath, keyPath);
    options.put("day", day);

    // flush pending data so the reader sees a consistent file
    syncOnlineWriter(onlineWriter);
    LogFileReader reader = lfsRegistry.newReader(tableName, logFileType, options);

    return new LogCursorImpl(tableName, day, buffer, reader, ascending);
}
private OnlineWriter loadOnlineWriter(String tableName, Date date) throws TimeoutException, InterruptedException,
        WriterPreparationException {
    // Return a ready online writer for (table, day), creating one if needed.
    // Handles the races around a writer that is concurrently being closed.

    // check table existence
    Integer tableId = tableNameCache.get(tableName);
    if (tableId == null)
        throw new TableNotFoundException(tableName);

    Date day = DateUtil.getDay(date);
    OnlineWriterKey key = new OnlineWriterKey(tableName, day, tableId);
    OnlineWriter online = onlineWriters.get(key);

    // fast path: cached writer that is still open
    if (online != null && !online.isClosed()) {
        if (online.isReady()) {
            return online;
        } else {
            // another thread is preparing it; wait for readiness
            online.awaitWriterPreparation();
            return online;
        }
    }

    try {
        OnlineWriter oldWriter = onlineWriters.get(key);
        // @formatter:off
        /*
         * statuses of OnlineWriter
         *
         * writer closing writer.closed 1) null false false 2) !null false
         * false 3) !null true false 4) !null true true
         */
        // @formatter:on

        if (oldWriter != null) {
            if (oldWriter.isCloseCompleted()) {
                // spin until the sweeper has removed the dead entry, then
                // create a fresh writer
                while (onlineWriters.get(key) == oldWriter)
                    Thread.yield();
                return loadNewOnlineWriter(key, getLogFileType(tableName));
            } else if (oldWriter.isClosed()) {
                // close in progress: wait for completion, then replace
                synchronized (oldWriter) {
                    while (!oldWriter.isCloseCompleted()) {
                        oldWriter.wait(1000);
                    }
                    while (onlineWriters.get(key) == oldWriter)
                        Thread.yield();
                    return loadNewOnlineWriter(key, getLogFileType(tableName));
                }
            } else {
                return loadNewOnlineWriter(key, getLogFileType(tableName));
            }
        } else {
            return loadNewOnlineWriter(key, getLogFileType(tableName));
        }
    } catch (UnsupportedLogFileTypeException e) {
        throw new IllegalStateException("cannot open writer: " + tableName + ", date=" + day, e);
    } catch (IOException e) {
        throw new IllegalStateException("cannot open writer: " + tableName + ", date=" + day, e);
    }
}
private String getLogFileType(String tableName) {
    // File type of the table's primary storage (drives writer/reader choice).
    TableSchema schema = tableRegistry.getTableSchema(tableName, true);
    return schema.getPrimaryStorage().getType();
}
private OnlineWriter loadNewOnlineWriter(OnlineWriterKey key, String logFileType) throws IOException, InterruptedException,
        TimeoutException, WriterPreparationException {
    // Create and register a new online writer; exactly one thread wins the
    // putIfAbsent race and prepares the writer, the others wait on it.
    OnlineWriter newWriter = newOnlineWriter(key.getTableName(), key.getDay(), logFileType);
    OnlineWriter consensus = onlineWriters.putIfAbsent(key, newWriter);
    if (consensus == null) {
        // we won: prepare the writer; roll back registration on failure
        try {
            AtomicLong lastKey = getLastKey(key);
            newWriter.prepareWriter(storageManager, callbackSet, logDir, lastKey);
            return newWriter;
        } catch (IOException e) {
            logger.error("loadNewOnlineWriter failed: " + key, e);
            onlineWriters.remove(key, newWriter);
            throw e;
        } catch (RuntimeException e) {
            logger.error("loadNewOnlineWriter failed: " + key, e);
            onlineWriters.remove(key, newWriter);
            throw e;
        }
    } else {
        // we lost: wait for the winner's preparation and discard our writer
        try {
            consensus.awaitWriterPreparation();
            if (!consensus.isReady())
                throw new IllegalStateException("log writer preparation failed - " + key);
            else {
                return consensus;
            }
        } finally {
            if (consensus != newWriter)
                newWriter.close();
        }
    }
}
private OnlineWriter newOnlineWriter(String tableName, Date day, String logFileType) throws InterruptedException {
    // Construct (but do not register) a writer for the given table/day.
    if (logFileType == null)
        logFileType = DEFAULT_LOGFILETYPE;

    LogFileService lfs = lfsRegistry.getLogFileService(logFileType);
    if (lfs == null) {
        throw new UnsupportedLogFileTypeException(logFileType);
    }

    TableSchema schema = tableRegistry.getTableSchema(tableName, true);
    Lock tableLock = tableRegistry.getSharedTableLock(tableName);
    // FIX: acquire the lock BEFORE entering try — the old code called lock()
    // inside the try, so a failed lock() still hit unlock() in finally,
    // raising IllegalMonitorStateException on a lock that was never held.
    tableLock.lock();
    try {
        return new OnlineWriter(lfs, schema, day);
    } finally {
        tableLock.unlock();
    }
}
private AtomicLong getLastKey(OnlineWriterKey key) {
    // Lazily register a -1 sequence counter on first use of this writer key.
    AtomicLong last = lastIds.get(key);
    if (last == null) {
        lastIds.putIfAbsent(key, new AtomicLong(-1));
        last = lastIds.get(key);
    }
    return last;
}
@Override
public void reload() {
    // Re-read tunables from configuration and push them into the sweeper.
    int flushInterval = getIntParameter(Constants.LogFlushInterval, DEFAULT_LOG_FLUSH_INTERVAL);
    int maxIdleTime = getIntParameter(Constants.LogMaxIdleTime, DEFAULT_MAX_IDLE_TIME);

    writerSweeper.setFlushInterval(flushInterval);
    writerSweeper.setMaxIdleTime(maxIdleTime);
}
@Override
public void flush() {
    // Ask the sweeper thread to flush all writers on its next pass.
    synchronized (writerSweeper) {
        writerSweeper.setFlushAll(true);
        writerSweeper.notifyAll();
    }
}
@Override
public void addLogListener(LogCallback callback) {
    // Register a listener notified on each committed log batch.
    callbacks.add(callback);
}
@Override
public void removeLogListener(LogCallback callback) {
    // Unregister a previously added log listener.
    callbacks.remove(callback);
}
private void verify() {
    // Guard: public operations are only valid while the storage is open.
    if (status != LogStorageStatus.Open)
        throw new IllegalStateException("archive not opened");
}
@Override
public List<LogWriterStatus> getWriterStatuses() {
    // Snapshot the state of every cached online writer.
    // FIX: iterate entrySet() instead of keySet()+get(); the old code raced
    // with the sweeper's eviction, and a writer removed between the two
    // calls came back null, NPE'ing on getLastAccess().
    List<LogWriterStatus> writers = new ArrayList<LogWriterStatus>(onlineWriters.size());
    for (Map.Entry<OnlineWriterKey, OnlineWriter> e : onlineWriters.entrySet()) {
        OnlineWriterKey key = e.getKey();
        OnlineWriter writer = e.getValue();
        LogWriterStatus s = new LogWriterStatus();
        s.setTableName(key.getTableName());
        s.setDay(key.getDay());
        s.setLastWrite(writer.getLastAccess());
        s.setBufferSize(writer.getBuffer().size());
        writers.add(s);
    }
    return writers;
}
/**
 * @since 2.7.0
 */
@Override
public LogFileWriter getOnlineWriter(String tableName, Date day) {
    // Look up the cached online writer for (table, day) without creating one.
    int tableId = tableRegistry.getTableSchema(tableName, true).getId();
    OnlineWriter online = onlineWriters.get(new OnlineWriterKey(tableName, day, tableId));
    return (online == null) ? null : online.getWriter();
}
@Override
public void addEventListener(LogStorageEventListener listener) {
    // Register a storage event listener (purge notifications, etc.).
    callbackSet.get(LogStorageEventListener.class).add(listener);
}
@Override
public void removeEventListener(LogStorageEventListener listener) {
    // Unregister a storage event listener.
    callbackSet.get(LogStorageEventListener.class).remove(listener);
}
@Override
public <T> void addEventListener(Class<T> clazz, T listener) {
    // Register a listener under an arbitrary callback type.
    callbackSet.get(clazz).add(listener);
}
@Override
public <T> void removeEventListener(Class<T> clazz, T listener) {
    // Unregister a typed listener.
    callbackSet.get(clazz).remove(listener);
}
@Override
public <T> void addFallback(Class<T> clazz, T fallback) {
    // Register a typed fallback (e.g. WriteFallback for lock failures).
    fallbackSet.get(clazz).add(fallback);
}
@Override
public <T> void removeFallback(Class<T> clazz, T fallback) {
    // Unregister a typed fallback.
    fallbackSet.get(clazz).remove(fallback);
}
private class WriterSweeper implements Runnable {
    // Background housekeeper: wakes every checkInterval millis, flushes
    // writers whose last flush is older than flushInterval, and evicts
    // writers idle longer than maxIdleTime.
    private final Logger logger = LoggerFactory.getLogger(WriterSweeper.class.getName());
    private volatile int checkInterval;
    private volatile int maxIdleTime;
    private volatile int flushInterval;
    private volatile boolean doStop = false;
    private volatile boolean isStopped = true;
    // one-shot "flush everything" flag set by flush(); consumed per sweep
    private volatile boolean flushAll = false;

    public WriterSweeper(int checkInterval, int maxIdleTime, int flushInterval) {
        this.checkInterval = checkInterval;
        this.maxIdleTime = maxIdleTime;
        this.flushInterval = flushInterval;
    }

    public void setFlushInterval(int flushInterval) {
        this.flushInterval = flushInterval;
    }

    public void setMaxIdleTime(int maxIdleTime) {
        this.maxIdleTime = maxIdleTime;
    }

    public void setFlushAll(boolean flushAll) {
        this.flushAll = flushAll;
    }

    @Override
    public void run() {
        try {
            isStopped = false;
            while (true) {
                try {
                    if (doStop)
                        break;
                    // sleep until the next period, or an early notifyAll()
                    synchronized (this) {
                        this.wait(checkInterval);
                    }
                    sweep();
                } catch (InterruptedException e) {
                    logger.trace("araqne logstorage: sweeper interrupted");
                } catch (Exception e) {
                    // keep the sweeper alive on unexpected errors
                    logger.error("araqne logstorage: sweeper error", e);
                }
            }
        } finally {
            doStop = false;
            isStopped = true;
        }

        logger.info("araqne logstorage: writer sweeper stopped");
    }

    private void sweep() {
        List<OnlineWriterKey> evicts = new ArrayList<OnlineWriterKey>();
        long now = new Date().getTime();
        try {
            // periodic log flush
            // snapshot-and-reset the one-shot flag so concurrent flush()
            // requests are not lost across sweeps
            boolean flushAll = this.flushAll;
            this.flushAll = false;

            for (OnlineWriterKey key : onlineWriters.keySet()) {
                OnlineWriter writer = onlineWriters.get(key);
                if (writer == null || writer.isClosed() || !writer.isReady())
                    continue;

                boolean doFlush = writer.isCloseReserved() || ((now - writer.getLastFlush().getTime()) > flushInterval);
                doFlush = flushAll ? true : doFlush;
                if (doFlush) {
                    try {
                        if (logger.isTraceEnabled())
                            logger.trace("araqne logstorage: flushing writer [{}]", key);
                        writer.flush();
                    } catch (IOException e) {
                        logger.error("araqne logstorage: log flush failed", e);
                    }
                }

                // close file if writer is in idle state
                int interval = (int) (now - writer.getLastAccess().getTime());
                if (interval > maxIdleTime || writer.isCloseReserved())
                    evicts.add(key);
            }
        } catch (ConcurrentModificationException e) {
            // benign: retry on the next sweep
        }

        closeAndKickout(evicts);
    }

    private void closeAndKickout(List<OnlineWriterKey> evicts) {
        // close each evicted writer before dropping it from the cache
        for (OnlineWriterKey key : evicts) {
            OnlineWriter evictee = onlineWriters.get(key);
            if (evictee != null) {
                evictee.close();
                if (logger.isTraceEnabled())
                    logger.trace("araqne logstorage: evict logger [{}]", key);
                onlineWriters.remove(key);
            }
        }
    }
}
private static class LogCursorImpl implements LogCursor {
    // Iterates one day's logs, merging the flushed file data with the
    // online writer's in-memory buffer (newest, not-yet-flushed logs).
    private String tableName;
    private Date day;
    private ArrayList<Log> buffer; // may be null when no online writer exists
    private LogFileReader reader;
    private LogRecordCursor cursor;
    private boolean ascending;
    private Log prefetch;
    private int bufferNext;
    private int bufferTotal;

    public LogCursorImpl(String tableName, Date day, ArrayList<Log> buffer, LogFileReader reader, boolean ascending)
            throws IOException {
        this.tableName = tableName;
        this.day = day;
        this.reader = reader;
        this.cursor = reader.getCursor(ascending);
        this.ascending = ascending;

        if (buffer != null) {
            this.buffer = buffer;
            this.bufferTotal = buffer.size();
            // ascending walks the buffer forward, descending backward
            this.bufferNext = ascending ? 0 : bufferTotal - 1;
        }
    }

    @Override
    public boolean hasNext() {
        if (prefetch != null)
            return true;

        if (ascending) {
            // flushed (older) logs first, then the in-memory buffer
            if (cursor.hasNext()) {
                prefetch = LogMarshaler.convert(tableName, cursor.next());
                return true;
            }
            if (buffer != null && bufferNext < bufferTotal) {
                prefetch = buffer.get(bufferNext++);
                return true;
            }
            return false;
        } else {
            // in-memory buffer (newest) first, walking backward, then the file.
            // FIX: the old condition was (bufferNext < 0), which skipped a
            // non-empty buffer entirely and indexed out of bounds (get(-1))
            // when the buffer was empty.
            if (buffer != null && bufferNext >= 0) {
                prefetch = buffer.get(bufferNext--);
                return true;
            }
            if (cursor.hasNext()) {
                prefetch = LogMarshaler.convert(tableName, cursor.next());
                return true;
            }
            return false;
        }
    }

    @Override
    public Log next() {
        if (!hasNext())
            throw new NoSuchElementException("end of log cursor");

        Log log = prefetch;
        prefetch = null;
        return log;
    }

    @Override
    public void remove() {
        throw new UnsupportedOperationException();
    }

    @Override
    public void close() {
        reader.close();
    }

    @Override
    public String toString() {
        SimpleDateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd");
        return "log cursor for table " + tableName + ", day " + dateFormat.format(day);
    }
}
@Override
public void onCreate(TableSchema schema) {
    // Keep the name->id cache in sync with table creation.
    tableNameCache.put(schema.getName(), schema.getId());
}
@Override
public void onAlter(TableSchema oldSchema, TableSchema newSchema) {
    // No-op: schema alterations do not affect the name->id cache.
}
@Override
public void onDrop(TableSchema schema) {
    // Keep the name->id cache in sync with table removal.
    tableNameCache.remove(schema.getName());
}
public void purgeOnlineWriters() {
    // Close every cached writer first, then drop them all from the cache.
    List<OnlineWriterKey> closed = new ArrayList<OnlineWriterKey>();
    for (Map.Entry<OnlineWriterKey, OnlineWriter> e : onlineWriters.entrySet()) {
        e.getValue().close();
        closed.add(e.getKey());
    }
    for (OnlineWriterKey key : closed)
        onlineWriters.remove(key);
}
@Override
public void onUnloadingFileService(String engineName) {
    // Close and evict every writer that belongs to the file service being
    // unloaded; individual failures are logged and skipped.
    List<OnlineWriterKey> victims = new ArrayList<OnlineWriterKey>();
    for (OnlineWriterKey key : onlineWriters.keySet()) {
        try {
            OnlineWriter w = onlineWriters.get(key);
            if (w != null && w.getFileServiceType().equals(engineName))
                victims.add(key);
        } catch (Throwable t) {
            logger.warn("exception caught", t);
        }
    }

    for (OnlineWriterKey key : victims) {
        try {
            OnlineWriter w = onlineWriters.get(key);
            if (w != null) {
                w.close();
                onlineWriters.remove(key);
            }
        } catch (Throwable t) {
            logger.warn("exception caught", t);
        }
    }
}
@Override
public boolean search(TableScanRequest req) throws InterruptedException {
    // Scan every partition day in the request's range, stopping early once
    // the traverse callback signals EOF. Returns false when EOF was reached.
    verify();

    String tableName = req.getTableName();
    Collection<Date> days = getLogDates(tableName);

    List<Date> filtered = DateUtil.filt(days, req.getFrom(), req.getTo());
    logger.trace("araqne logstorage: searching {} tablets of table [{}]", filtered.size(), tableName);

    // getLogDates() returns newest-first; re-sort for ascending scans
    if (req.isAsc())
        Collections.sort(filtered);

    for (Date day : filtered) {
        if (logger.isTraceEnabled())
            logger.trace("araqne logstorage: searching table {}, date={}", tableName, DateUtil.getDayText(day));

        searchTablet(req, day);
        if (req.getTraverseCallback().isEof())
            break;
    }

    return !req.getTraverseCallback().isEof();
}
private void syncOnlineWriter(OnlineWriter onlineWriter) {
    // Best-effort sync; null writers are ignored and failures only logged.
    if (onlineWriter == null)
        return;

    try {
        onlineWriter.sync();
    } catch (SyncFailedException e) {
        logger.debug("araqne logstorage: sync failed", e);
    } catch (IOException e) {
        logger.error("araqne logstorage: cannot sync online writer", e);
    }
}
@Override
public boolean searchTablet(TableScanRequest req, Date day) throws InterruptedException {
String tableName = req.getTableName();
Date from = req.getFrom();
Date to = req.getTo();
long minId = req.getMinId();
long maxId = req.getMaxId();
LogParserBuilder builder = req.getParserBuilder();
LogTraverseCallback c = req.getTraverseCallback();
TableSchema schema = tableRegistry.getTableSchema(tableName, true);
int tableId = schema.getId();
String basePathString = schema.getPrimaryStorage().getBasePath();
FilePath basePath = logDir;
if (basePathString != null)
basePath = storageManager.resolveFilePath(basePathString);
FilePath indexPath = DatapathUtil.getIndexFile(tableId, day, basePath);
FilePath dataPath = DatapathUtil.getDataFile(tableId, day, basePath);
FilePath keyPath = DatapathUtil.getKeyFile(tableId, day, basePath);
LogFileReader reader = null;
long onlineMinId = -1;
List<Log> logs = null;
try {
// do NOT use getOnlineWriter() here (it loads empty writer on cache
// automatically if writer not found)
OnlineWriter onlineWriter = onlineWriters.get(new OnlineWriterKey(tableName, day, tableId));
if (onlineWriter != null) {
List<Log> buffer = onlineWriter.getBuffer();
syncOnlineWriter(onlineWriter);
if (buffer != null && !buffer.isEmpty()) {
LogParser parser = null;
if (builder != null)
parser = builder.build();
if (logger.isTraceEnabled())
logger.trace("araqne logstorage: {} logs in writer buffer.", buffer.size());
logs = new ArrayList<Log>(buffer.size());
if (req.isAsc()) {
for (Log logData : buffer) {
if (onlineMinId == -1)
onlineMinId = logData.getId();
if ((from == null || !logData.getDate().before(from)) && (to == null || logData.getDate().before(to))
&& (minId < 0 || minId <= logData.getId()) && (maxId < 0 || maxId >= logData.getId())) {
List<Log> result = null;
try {
result = LogFileReader.parse(tableName, parser, logData);
} catch (LogParserBugException e) {
result = Arrays.asList(new Log[] { new Log(e.tableName, e.date, e.id, e.logMap) });
c.setFailure(e);
}
logs.addAll(result);
}
}
} else {
ListIterator<Log> li = buffer.listIterator(buffer.size());
while (li.hasPrevious()) {
Log logData = li.previous();
onlineMinId = logData.getId();
if ((from == null || !logData.getDate().before(from)) && (to == null || logData.getDate().before(to))
&& (minId < 0 || minId <= logData.getId()) && (maxId < 0 || maxId >= logData.getId())) {
List<Log> result = null;
try {
result = LogFileReader.parse(tableName, parser, logData);
} catch (LogParserBugException e) {
result = Arrays.asList(new Log[] { new Log(e.tableName, e.date, e.id, e.logMap) });
c.setFailure(e);
}
logs.addAll(result);
}
}
}
if (c.isEof())
return false;
}
}
- logger.debug("#buffer flush bug# logs size: {}", logs.size());
+ if (logs != null && logger.isDebugEnabled())
+ logger.debug("#buffer flush bug# logs size: {}", logs.size());
String logFileType = schema.getPrimaryStorage().getType();
LogFileServiceV2.Option options = new LogFileServiceV2.Option(schema.getPrimaryStorage(), schema.getMetadata(),
tableName, basePath, indexPath, dataPath, keyPath);
options.put("day", day);
syncOnlineWriter(onlineWriter);
reader = lfsRegistry.newReader(tableName, logFileType, options);
long flushedMaxId = (onlineMinId > 0) ? onlineMinId - 1 : maxId;
long readerMaxId = maxId != -1 ? Math.min(flushedMaxId, maxId) : flushedMaxId;
logger.debug("#buffer flush bug# minId: {}, readerMaxId: {}", minId, readerMaxId);
logger.debug("#buffer flush bug# maxId: {}, onlineMinId: {}", maxId, flushedMaxId);
if (minId < 0 || readerMaxId < 0 || readerMaxId >= minId) {
if (req.isAsc()) {
TableScanRequest tabletReq = req.clone();
tabletReq.setMaxId(readerMaxId);
reader.traverse(tabletReq);
if (logs != null)
c.writeLogs(logs);
} else {
if (logs != null)
c.writeLogs(logs);
TableScanRequest tabletReq = req.clone();
tabletReq.setMaxId(readerMaxId);
reader.traverse(tabletReq);
}
}
} catch (InterruptedException e) {
throw e;
} catch (IllegalStateException e) {
c.setFailure(e);
Throwable cause = e.getCause();
if (cause instanceof BufferUnderflowException || cause instanceof IOException)
c.setFailure(cause);
if (e.getMessage().contains("license is locked"))
logger.warn("araqne logstorage: search tablet failed. {}", e.getMessage());
else if (logger.isTraceEnabled())
logger.trace("araqne logstorage: search tablet failed. logfile may be not synced yet", e);
} catch (BufferUnderflowException e) {
c.setFailure(e);
logger.trace("araqne logstorage: search tablet failed. logfile may be not synced yet", e);
} catch (IOException e) {
c.setFailure(e);
logger.warn("araqne logstorage: search tablet failed. logfile may be not synced yet", e);
} catch (Exception e) {
c.setFailure(e);
logger.error("araqne logstorage: search tablet failed", e);
} finally {
if (reader != null)
reader.close();
}
return !c.isEof();
}
@Override
public StorageManager getStorageManager() {
    // Accessor for the engine's storage manager.
    return storageManager;
}
@Override
public boolean lock(LockKey key, String purpose, long timeout, TimeUnit unit) throws InterruptedException {
    // Acquire the table's exclusive lock; on success, flush the table so
    // the lock holder observes fully persisted data.
    Lock lock = tableRegistry.getExclusiveTableLock(key.tableName, key.owner, purpose);
    if (lock.tryLock(timeout, unit)) {
        flush(key.tableName);
        return true;
    } else {
        return false;
    }
}
@Override
public void unlock(LockKey storageLockKey, String purpose) {
    // Release the exclusive lock acquired by lock() for the same key/purpose.
    Lock lock = tableRegistry.getExclusiveTableLock(storageLockKey.tableName, storageLockKey.owner, purpose);
    lock.unlock();
}
@Override
public LockStatus lockStatus(LockKey storageLockKey) {
    // Report the current lock state of the table.
    return tableRegistry.getTableLockStatus(storageLockKey.tableName);
}
@Override
public void flush(String tableName) {
    // Reserve close on every online writer of the table, wake the sweeper
    // (which performs the actual close/flush), then wait for completion.
    HashMap<OnlineWriterKey, CountDownLatch> monitors = new HashMap<OnlineWriterKey, CountDownLatch>();
    for (OnlineWriterKey key : onlineWriters.keySet()) {
        if (key.getTableName().equals(tableName)) {
            OnlineWriter ow = onlineWriters.get(key);
            CountDownLatch monitor = ow.reserveClose();
            monitors.put(key, monitor);
        }
    }

    synchronized (writerSweeper) {
        writerSweeper.notifyAll();
    }

    for (Map.Entry<OnlineWriterKey, CountDownLatch> e : monitors.entrySet()) {
        waitForClose(e.getKey(), e.getValue());
    }
}
@SuppressWarnings("serial")
// Internal signal: the sweeper thread died, so waiting on its latch is futile.
private static class SweeperThreadStoppedException extends RuntimeException {
}
private void waitForClose(OnlineWriterKey key, CountDownLatch monitor) {
    // Wait for the sweeper to close the reserved writer. If the sweeper
    // thread is dead (or we are interrupted), close the writer ourselves.
    try {
        if (writerSweeperThread.isAlive()) {
            // first wait is bounded so a wedged close gets logged
            boolean closed = monitor.await(1, TimeUnit.MINUTES);
            if (!closed) {
                logger.info("wait for closing Table: {}", key.getTableName());
                if (writerSweeperThread.isAlive())
                    monitor.await();
                else
                    throw new SweeperThreadStoppedException();
            }
        } else {
            throw new SweeperThreadStoppedException();
        }
    } catch (SweeperThreadStoppedException e) {
        // sweeper gone: close directly
        OnlineWriter o = onlineWriters.get(key);
        if (o != null)
            o.close();
    } catch (InterruptedException e) {
        logger.warn("wait for closing interrupted: {}", key.getTableName());
        OnlineWriter o = onlineWriters.get(key);
        if (o != null)
            o.close();
    }
}
@Override
public boolean tryWrite(Log log) {
    // Non-blocking single write (zero lock wait); false on interruption.
    try {
        return tryWrite(log, 0, TimeUnit.SECONDS);
    } catch (InterruptedException e) {
        return false;
    }
}
@Override
public boolean tryWrite(List<Log> logs) {
    // Non-blocking batch write (zero lock wait); false on interruption.
    try {
        return tryWrite(logs, 0, TimeUnit.SECONDS);
    } catch (InterruptedException e) {
        return false;
    }
}
@Override
public void write(Log log) throws InterruptedException {
    // Blocking single write (effectively unbounded lock wait).
    tryWrite(log, Long.MAX_VALUE, TimeUnit.SECONDS);
}
@Override
public void write(List<Log> logs) throws InterruptedException {
    // Blocking batch write (effectively unbounded lock wait).
    tryWrite(logs, Long.MAX_VALUE, TimeUnit.SECONDS);
}
}
| true | true | public boolean searchTablet(TableScanRequest req, Date day) throws InterruptedException {
String tableName = req.getTableName();
Date from = req.getFrom();
Date to = req.getTo();
long minId = req.getMinId();
long maxId = req.getMaxId();
LogParserBuilder builder = req.getParserBuilder();
LogTraverseCallback c = req.getTraverseCallback();
TableSchema schema = tableRegistry.getTableSchema(tableName, true);
int tableId = schema.getId();
String basePathString = schema.getPrimaryStorage().getBasePath();
FilePath basePath = logDir;
if (basePathString != null)
basePath = storageManager.resolveFilePath(basePathString);
FilePath indexPath = DatapathUtil.getIndexFile(tableId, day, basePath);
FilePath dataPath = DatapathUtil.getDataFile(tableId, day, basePath);
FilePath keyPath = DatapathUtil.getKeyFile(tableId, day, basePath);
LogFileReader reader = null;
long onlineMinId = -1;
List<Log> logs = null;
try {
// do NOT use getOnlineWriter() here (it loads empty writer on cache
// automatically if writer not found)
OnlineWriter onlineWriter = onlineWriters.get(new OnlineWriterKey(tableName, day, tableId));
if (onlineWriter != null) {
List<Log> buffer = onlineWriter.getBuffer();
syncOnlineWriter(onlineWriter);
if (buffer != null && !buffer.isEmpty()) {
LogParser parser = null;
if (builder != null)
parser = builder.build();
if (logger.isTraceEnabled())
logger.trace("araqne logstorage: {} logs in writer buffer.", buffer.size());
logs = new ArrayList<Log>(buffer.size());
if (req.isAsc()) {
for (Log logData : buffer) {
if (onlineMinId == -1)
onlineMinId = logData.getId();
if ((from == null || !logData.getDate().before(from)) && (to == null || logData.getDate().before(to))
&& (minId < 0 || minId <= logData.getId()) && (maxId < 0 || maxId >= logData.getId())) {
List<Log> result = null;
try {
result = LogFileReader.parse(tableName, parser, logData);
} catch (LogParserBugException e) {
result = Arrays.asList(new Log[] { new Log(e.tableName, e.date, e.id, e.logMap) });
c.setFailure(e);
}
logs.addAll(result);
}
}
} else {
ListIterator<Log> li = buffer.listIterator(buffer.size());
while (li.hasPrevious()) {
Log logData = li.previous();
onlineMinId = logData.getId();
if ((from == null || !logData.getDate().before(from)) && (to == null || logData.getDate().before(to))
&& (minId < 0 || minId <= logData.getId()) && (maxId < 0 || maxId >= logData.getId())) {
List<Log> result = null;
try {
result = LogFileReader.parse(tableName, parser, logData);
} catch (LogParserBugException e) {
result = Arrays.asList(new Log[] { new Log(e.tableName, e.date, e.id, e.logMap) });
c.setFailure(e);
}
logs.addAll(result);
}
}
}
if (c.isEof())
return false;
}
}
logger.debug("#buffer flush bug# logs size: {}", logs.size());
String logFileType = schema.getPrimaryStorage().getType();
LogFileServiceV2.Option options = new LogFileServiceV2.Option(schema.getPrimaryStorage(), schema.getMetadata(),
tableName, basePath, indexPath, dataPath, keyPath);
options.put("day", day);
syncOnlineWriter(onlineWriter);
reader = lfsRegistry.newReader(tableName, logFileType, options);
long flushedMaxId = (onlineMinId > 0) ? onlineMinId - 1 : maxId;
long readerMaxId = maxId != -1 ? Math.min(flushedMaxId, maxId) : flushedMaxId;
logger.debug("#buffer flush bug# minId: {}, readerMaxId: {}", minId, readerMaxId);
logger.debug("#buffer flush bug# maxId: {}, onlineMinId: {}", maxId, flushedMaxId);
if (minId < 0 || readerMaxId < 0 || readerMaxId >= minId) {
if (req.isAsc()) {
TableScanRequest tabletReq = req.clone();
tabletReq.setMaxId(readerMaxId);
reader.traverse(tabletReq);
if (logs != null)
c.writeLogs(logs);
} else {
if (logs != null)
c.writeLogs(logs);
TableScanRequest tabletReq = req.clone();
tabletReq.setMaxId(readerMaxId);
reader.traverse(tabletReq);
}
}
} catch (InterruptedException e) {
throw e;
} catch (IllegalStateException e) {
c.setFailure(e);
Throwable cause = e.getCause();
if (cause instanceof BufferUnderflowException || cause instanceof IOException)
c.setFailure(cause);
if (e.getMessage().contains("license is locked"))
logger.warn("araqne logstorage: search tablet failed. {}", e.getMessage());
else if (logger.isTraceEnabled())
logger.trace("araqne logstorage: search tablet failed. logfile may be not synced yet", e);
} catch (BufferUnderflowException e) {
c.setFailure(e);
logger.trace("araqne logstorage: search tablet failed. logfile may be not synced yet", e);
} catch (IOException e) {
c.setFailure(e);
logger.warn("araqne logstorage: search tablet failed. logfile may be not synced yet", e);
} catch (Exception e) {
c.setFailure(e);
logger.error("araqne logstorage: search tablet failed", e);
} finally {
if (reader != null)
reader.close();
}
return !c.isEof();
}
// Fixed variant: scans one day tablet, matching online writer buffer logs in
// memory first, then reading the flushed portion through a LogFileReader.
// Returns true while the traverse callback has not reached EOF.
| public boolean searchTablet(TableScanRequest req, Date day) throws InterruptedException {
	String tableName = req.getTableName();
	Date from = req.getFrom();
	Date to = req.getTo();
	long minId = req.getMinId();
	long maxId = req.getMaxId();
	LogParserBuilder builder = req.getParserBuilder();
	LogTraverseCallback c = req.getTraverseCallback();
	TableSchema schema = tableRegistry.getTableSchema(tableName, true);
	int tableId = schema.getId();
	String basePathString = schema.getPrimaryStorage().getBasePath();
	FilePath basePath = logDir;
	if (basePathString != null)
		basePath = storageManager.resolveFilePath(basePathString);
	FilePath indexPath = DatapathUtil.getIndexFile(tableId, day, basePath);
	FilePath dataPath = DatapathUtil.getDataFile(tableId, day, basePath);
	FilePath keyPath = DatapathUtil.getKeyFile(tableId, day, basePath);
	LogFileReader reader = null;
	long onlineMinId = -1;
	// logs stays null when there is no online writer or its buffer is empty
	List<Log> logs = null;
	try {
		// do NOT use getOnlineWriter() here (it loads empty writer on cache
		// automatically if writer not found)
		OnlineWriter onlineWriter = onlineWriters.get(new OnlineWriterKey(tableName, day, tableId));
		if (onlineWriter != null) {
			List<Log> buffer = onlineWriter.getBuffer();
			syncOnlineWriter(onlineWriter);
			if (buffer != null && !buffer.isEmpty()) {
				LogParser parser = null;
				if (builder != null)
					parser = builder.build();
				if (logger.isTraceEnabled())
					logger.trace("araqne logstorage: {} logs in writer buffer.", buffer.size());
				logs = new ArrayList<Log>(buffer.size());
				if (req.isAsc()) {
					for (Log logData : buffer) {
						if (onlineMinId == -1)
							onlineMinId = logData.getId();
						if ((from == null || !logData.getDate().before(from)) && (to == null || logData.getDate().before(to))
								&& (minId < 0 || minId <= logData.getId()) && (maxId < 0 || maxId >= logData.getId())) {
							List<Log> result = null;
							try {
								result = LogFileReader.parse(tableName, parser, logData);
							} catch (LogParserBugException e) {
								// keep the raw log so the scan can continue, but report the parser bug
								result = Arrays.asList(new Log[] { new Log(e.tableName, e.date, e.id, e.logMap) });
								c.setFailure(e);
							}
							logs.addAll(result);
						}
					}
				} else {
					// iterate the buffer backwards for descending scans
					ListIterator<Log> li = buffer.listIterator(buffer.size());
					while (li.hasPrevious()) {
						Log logData = li.previous();
						onlineMinId = logData.getId();
						if ((from == null || !logData.getDate().before(from)) && (to == null || logData.getDate().before(to))
								&& (minId < 0 || minId <= logData.getId()) && (maxId < 0 || maxId >= logData.getId())) {
							List<Log> result = null;
							try {
								result = LogFileReader.parse(tableName, parser, logData);
							} catch (LogParserBugException e) {
								result = Arrays.asList(new Log[] { new Log(e.tableName, e.date, e.id, e.logMap) });
								c.setFailure(e);
							}
							logs.addAll(result);
						}
					}
				}
				if (c.isEof())
					return false;
			}
		}
		// null guard: avoids the NPE of the earlier revision when nothing was buffered
		if (logs != null && logger.isDebugEnabled())
			logger.debug("#buffer flush bug# logs size: {}", logs.size());
		String logFileType = schema.getPrimaryStorage().getType();
		LogFileServiceV2.Option options = new LogFileServiceV2.Option(schema.getPrimaryStorage(), schema.getMetadata(),
				tableName, basePath, indexPath, dataPath, keyPath);
		options.put("day", day);
		syncOnlineWriter(onlineWriter);
		reader = lfsRegistry.newReader(tableName, logFileType, options);
		// ids below the smallest buffered id are assumed flushed to disk
		long flushedMaxId = (onlineMinId > 0) ? onlineMinId - 1 : maxId;
		long readerMaxId = maxId != -1 ? Math.min(flushedMaxId, maxId) : flushedMaxId;
		logger.debug("#buffer flush bug# minId: {}, readerMaxId: {}", minId, readerMaxId);
		logger.debug("#buffer flush bug# maxId: {}, onlineMinId: {}", maxId, flushedMaxId);
		if (minId < 0 || readerMaxId < 0 || readerMaxId >= minId) {
			if (req.isAsc()) {
				// ascending: flushed logs first, then buffered logs
				TableScanRequest tabletReq = req.clone();
				tabletReq.setMaxId(readerMaxId);
				reader.traverse(tabletReq);
				if (logs != null)
					c.writeLogs(logs);
			} else {
				// descending: buffered logs first, then flushed logs
				if (logs != null)
					c.writeLogs(logs);
				TableScanRequest tabletReq = req.clone();
				tabletReq.setMaxId(readerMaxId);
				reader.traverse(tabletReq);
			}
		}
	} catch (InterruptedException e) {
		throw e;
	} catch (IllegalStateException e) {
		c.setFailure(e);
		Throwable cause = e.getCause();
		if (cause instanceof BufferUnderflowException || cause instanceof IOException)
			c.setFailure(cause);
		if (e.getMessage().contains("license is locked"))
			logger.warn("araqne logstorage: search tablet failed. {}", e.getMessage());
		else if (logger.isTraceEnabled())
			logger.trace("araqne logstorage: search tablet failed. logfile may be not synced yet", e);
	} catch (BufferUnderflowException e) {
		c.setFailure(e);
		logger.trace("araqne logstorage: search tablet failed. logfile may be not synced yet", e);
	} catch (IOException e) {
		c.setFailure(e);
		logger.warn("araqne logstorage: search tablet failed. logfile may be not synced yet", e);
	} catch (Exception e) {
		c.setFailure(e);
		logger.error("araqne logstorage: search tablet failed", e);
	} finally {
		if (reader != null)
			reader.close();
	}
	return !c.isEof();
}
|
diff --git a/amibe/src/org/jcae/mesh/amibe/algos2d/Insertion.java b/amibe/src/org/jcae/mesh/amibe/algos2d/Insertion.java
index 304dd292..98203d06 100644
--- a/amibe/src/org/jcae/mesh/amibe/algos2d/Insertion.java
+++ b/amibe/src/org/jcae/mesh/amibe/algos2d/Insertion.java
@@ -1,310 +1,312 @@
/* jCAE stand for Java Computer Aided Engineering. Features are : Small CAD
modeler, Finite element mesher, Plugin architecture.
Copyright (C) 2003,2004,2005,2006, by EADS CRC
Copyright (C) 2007, by EADS France
This library is free software; you can redistribute it and/or
modify it under the terms of the GNU Lesser General Public
License as published by the Free Software Foundation; either
version 2.1 of the License, or (at your option) any later version.
This library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public
License along with this library; if not, write to the Free Software
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
package org.jcae.mesh.amibe.algos2d;
import org.jcae.mesh.amibe.ds.TriangleVH;
import org.jcae.mesh.amibe.ds.Triangle;
import org.jcae.mesh.amibe.ds.AbstractHalfEdge;
import org.jcae.mesh.amibe.ds.Vertex;
import org.jcae.mesh.amibe.patch.Mesh2D;
import org.jcae.mesh.amibe.patch.VirtualHalfEdge2D;
import org.jcae.mesh.amibe.patch.Vertex2D;
import java.util.Iterator;
import java.util.ArrayList;
import gnu.trove.PrimeFinder;
import org.apache.log4j.Logger;
/**
* Insert nodes to produce a unit mesh. Process all edges; if an edge
* is longer than sqrt(2), candidate vertices are added to a bucket
* to virtually provide unit length subsegments.
* The next step is to take vertices from the bucket in random order.
* For each vertex <code>v</code>, the closest vertex <code>w</code>
* already present in the mesh is returned by
* {@link org.jcae.mesh.amibe.util.KdTree#getNearestVertex(Mesh, Vertex)}
* If the distance between <code>v</code> and <code>w</code> is lower
* than 1/sqrt(2), <code>v</code> is dropped, otherwise it is inserted
* into the mesh. Just after a vertex is inserted, incident edges are
* swapped if they are not Delaunay.
* The whole process is repeated until no new vertex is added.
*
* <p>
* If all vertices of an edge were inserted at the same time, adjacent
* edges may get in trouble because their candidate vertices could be
* too near from these points. In order to avoid this problem, vertices
* are processed in a random order so that all edges have a chance to be
* splitted. As we want reproducible meshes, a pseudo-random order is
* preferred.
* </p>
*
* <p>
* Triangle centroids are also inserted if they are not too near of
* existing vertices. This was added to try to improve triangle
* quality, but is a bad idea. Bad triangles should instead be sorted
* (with {@link org.jcae.mesh.amibe.util.PAVLSortedTree}) and their
* circumcenter added to the mesh if the overall quality is improved,
* </p>
*/
public class Insertion
{
private static Logger logger=Logger.getLogger(Insertion.class);
private Mesh2D mesh;
private double minlen = 1.0 / Math.sqrt(2.);
private double maxlen = 1.0 * Math.sqrt(2.);
/**
* Creates a <code>Insertion</code> instance.
*
* @param m the <code>Mesh2D</code> instance to refine.
*/
public Insertion(Mesh2D m)
{
mesh = m;
}
public Insertion(Mesh2D m, double scale)
{
mesh = m;
minlen = scale / Math.sqrt(2.);
maxlen = scale * Math.sqrt(2.);
}
/**
* Iteratively insert inner nodes.
*/
public void compute()
{
int nrIter = 0;
- int tooNearNodes = 0;
- int kdtreeSplit = 0;
logger.debug(" Insert inner nodes");
ArrayList<Vertex2D> nodes = new ArrayList<Vertex2D>();
ArrayList<Vertex2D> triNodes = new ArrayList<Vertex2D>();
VirtualHalfEdge2D sym = new VirtualHalfEdge2D();
VirtualHalfEdge2D ot = new VirtualHalfEdge2D();
// We do not want to split boundary edges.
for(Iterator<Triangle> it = mesh.getTriangles().iterator(); it.hasNext(); )
{
TriangleVH t = (TriangleVH) it.next();
if (t.hasAttributes(AbstractHalfEdge.OUTER))
continue;
ot.bind(t);
for (int i = 0; i < 3; i++)
{
ot.next();
if (ot.hasAttributes(AbstractHalfEdge.BOUNDARY))
ot.setAttributes(AbstractHalfEdge.MARKED);
else
ot.clearAttributes(AbstractHalfEdge.MARKED);
}
}
// We try to insert new nodes by splitting large edges. As edge collapse
// is costful, nodes are inserted only if it does not create small edges,
// which means that nodes are not deleted.
// We iterate over all edges, and put candidate nodes into triNodes.
// If an edge has no candidates, either because it is small or because no
// nodes can be inserted, it is tagged and will not have to be checked
// during next iterations.
while (true)
{
nrIter++;
// Maximal number of nodes which are inserted on an edge
int maxNodes = 0;
// Number of checked edges
int checked = 0;
+ // Number of nodes which are too near from existing vertices
+ int tooNearNodes = 0;
+ // Number of quadtree cells split
+ int kdtreeSplit = 0;
nodes.clear();
for(Iterator<Triangle> it = mesh.getTriangles().iterator(); it.hasNext(); )
{
TriangleVH t = (TriangleVH) it.next();
if (t.hasAttributes(AbstractHalfEdge.OUTER))
continue;
ot.bind(t);
triNodes.clear();
// Maximal number of nodes which are inserted on edges of this triangle
int nrTriNodes = 0;
for (int i = 0; i < 3; i++)
{
ot.next();
if (ot.hasAttributes(AbstractHalfEdge.MARKED))
{
// This edge has already been checked and cannot be split
continue;
}
sym.bind((TriangleVH) ot.getTri(), ot.getLocalNumber());
sym.sym();
if (sym.hasAttributes(AbstractHalfEdge.MARKED))
{
// This edge has already been checked and cannot be split
continue;
}
double l = mesh.compGeom().length(ot);
if (l < maxlen)
{
// This edge is smaller than target size and is not split
ot.setAttributes(AbstractHalfEdge.MARKED);
sym.setAttributes(AbstractHalfEdge.MARKED);
continue;
}
// Tag symmetric edge so that edges are checked only once
sym.setAttributes(AbstractHalfEdge.MARKED);
// Long edges are discretized, but do not create more than 4 subsegments
double lcrit = 1.0;
if (l > 4.0)
lcrit = l / 4.0;
Vertex2D start = (Vertex2D) ot.origin();
Vertex2D end = (Vertex2D) ot.destination();
double [] xs = start.getUV();
double [] xe = end.getUV();
int segments = (int) (2.0*l/lcrit) + 10;
Vertex [] np = new Vertex[segments-1];
for (int ns = 1; ns < segments; ns++)
np[ns-1] = mesh.createVertex(xs[0]+ns*(xe[0]-xs[0])/segments, xs[1]+ns*(xe[1]-xs[1])/segments);
Vertex2D last = start;
int nrNodes = 0;
l = 0.0;
for (int ns = 0; ns < segments-1; ns++)
{
l = mesh.compGeom().distance(last, (Vertex2D) np[ns]);
if (l > lcrit)
{
last = (Vertex2D) np[ns];
triNodes.add(last);
l = 0.0;
nrNodes++;
}
}
if (nrNodes > nrTriNodes)
{
nrTriNodes = nrNodes;
}
else if (nrNodes == 0)
{
ot.setAttributes(AbstractHalfEdge.MARKED);
}
checked++;
}
if (nrTriNodes > maxNodes)
maxNodes = nrTriNodes;
if (!triNodes.isEmpty())
{
// Process in pseudo-random order
int prime = PrimeFinder.nextPrime(nrTriNodes);
int imax = triNodes.size();
while (imax % prime == 0)
prime = PrimeFinder.nextPrime(prime+1);
if (prime >= imax)
prime = 1;
int index = imax / 2;
for (int i = 0; i < imax; i++)
{
Vertex2D v = triNodes.get(index);
Vertex2D n = (Vertex2D) mesh.getQuadTree().getNearestVertex(mesh, v);
assert n == mesh.getQuadTree().getNearestVertexDebug(mesh, v);
if (mesh.compGeom().distance(v, n) > minlen)
{
mesh.getQuadTree().add(v);
nodes.add(v);
}
else
tooNearNodes++;
index += prime;
if (index >= imax)
index -= imax;
}
}
}
// Try to insert triangle centroid after all other points.
Vertex2D c = null;
for (Iterator<Triangle> it = mesh.getTriangles().iterator(); it.hasNext(); )
{
TriangleVH t = (TriangleVH) it.next();
if (t.hasAttributes(AbstractHalfEdge.OUTER))
continue;
if (c == null)
c = (Vertex2D) mesh.createVertex(0.0, 0.0);
c.centroid((Vertex2D[]) t.vertex);
Vertex2D n = (Vertex2D) mesh.getQuadTree().getNearestVertex(mesh, c);
assert n == mesh.getQuadTree().getNearestVertexDebug(mesh, c);
if (mesh.compGeom().distance(c, n) > minlen)
{
mesh.getQuadTree().add(c);
nodes.add(c);
c = null;
}
else
tooNearNodes++;
}
if (nodes.isEmpty())
break;
for (Iterator<Vertex2D> it = nodes.iterator(); it.hasNext(); )
{
Vertex2D v = it.next();
// These vertices are not bound to any triangles, so
// they must be removed, otherwise getSurroundingOTriangle
// may return a null pointer.
mesh.getQuadTree().remove(v);
}
// Process in pseudo-random order. There is at most maxNodes nodes
// on an edge, we choose an increment step greater than this value
// to try to split all edges.
int prime = PrimeFinder.nextPrime(maxNodes);
int imax = nodes.size();
while (imax % prime == 0)
prime = PrimeFinder.nextPrime(prime+1);
if (prime >= imax)
prime = 1;
int index = imax / 2;
int skippedNodes = 0;
for (int i = 0; i < imax; i++)
{
Vertex2D v = nodes.get(index);
VirtualHalfEdge2D vt = v.getSurroundingOTriangle(mesh);
if (!vt.split3(mesh, v, false))
skippedNodes++;
index += prime;
if (index >= imax)
index -= imax;
}
if (logger.isDebugEnabled())
{
logger.debug("Mesh now contains "+mesh.getTriangles().size()+" triangles");
if (checked > 0)
logger.debug(checked+" edges checked");
if (imax - skippedNodes > 0)
logger.debug((imax-skippedNodes)+" nodes added");
if (tooNearNodes > 0)
logger.debug(tooNearNodes+" nodes are too near from existing vertices and cannot be inserted");
if (skippedNodes > 0)
logger.debug(skippedNodes+" nodes cannot be inserted");
if (kdtreeSplit > 0)
logger.debug(kdtreeSplit+" quadtree cells split");
}
if (skippedNodes == nodes.size())
break;
}
logger.debug("Number of iterations to insert all nodes: "+nrIter);
}
}
| false | true | public void compute()
{
int nrIter = 0;
int tooNearNodes = 0;
int kdtreeSplit = 0;
logger.debug(" Insert inner nodes");
ArrayList<Vertex2D> nodes = new ArrayList<Vertex2D>();
ArrayList<Vertex2D> triNodes = new ArrayList<Vertex2D>();
VirtualHalfEdge2D sym = new VirtualHalfEdge2D();
VirtualHalfEdge2D ot = new VirtualHalfEdge2D();
// We do not want to split boundary edges.
for(Iterator<Triangle> it = mesh.getTriangles().iterator(); it.hasNext(); )
{
TriangleVH t = (TriangleVH) it.next();
if (t.hasAttributes(AbstractHalfEdge.OUTER))
continue;
ot.bind(t);
for (int i = 0; i < 3; i++)
{
ot.next();
if (ot.hasAttributes(AbstractHalfEdge.BOUNDARY))
ot.setAttributes(AbstractHalfEdge.MARKED);
else
ot.clearAttributes(AbstractHalfEdge.MARKED);
}
}
// We try to insert new nodes by splitting large edges. As edge collapse
// is costful, nodes are inserted only if it does not create small edges,
// which means that nodes are not deleted.
// We iterate over all edges, and put candidate nodes into triNodes.
// If an edge has no candidates, either because it is small or because no
// nodes can be inserted, it is tagged and will not have to be checked
// during next iterations.
while (true)
{
nrIter++;
// Maximal number of nodes which are inserted on an edge
int maxNodes = 0;
// Number of checked edges
int checked = 0;
nodes.clear();
for(Iterator<Triangle> it = mesh.getTriangles().iterator(); it.hasNext(); )
{
TriangleVH t = (TriangleVH) it.next();
if (t.hasAttributes(AbstractHalfEdge.OUTER))
continue;
ot.bind(t);
triNodes.clear();
// Maximal number of nodes which are inserted on edges of this triangle
int nrTriNodes = 0;
for (int i = 0; i < 3; i++)
{
ot.next();
if (ot.hasAttributes(AbstractHalfEdge.MARKED))
{
// This edge has already been checked and cannot be split
continue;
}
sym.bind((TriangleVH) ot.getTri(), ot.getLocalNumber());
sym.sym();
if (sym.hasAttributes(AbstractHalfEdge.MARKED))
{
// This edge has already been checked and cannot be split
continue;
}
double l = mesh.compGeom().length(ot);
if (l < maxlen)
{
// This edge is smaller than target size and is not split
ot.setAttributes(AbstractHalfEdge.MARKED);
sym.setAttributes(AbstractHalfEdge.MARKED);
continue;
}
// Tag symmetric edge so that edges are checked only once
sym.setAttributes(AbstractHalfEdge.MARKED);
// Long edges are discretized, but do not create more than 4 subsegments
double lcrit = 1.0;
if (l > 4.0)
lcrit = l / 4.0;
Vertex2D start = (Vertex2D) ot.origin();
Vertex2D end = (Vertex2D) ot.destination();
double [] xs = start.getUV();
double [] xe = end.getUV();
int segments = (int) (2.0*l/lcrit) + 10;
Vertex [] np = new Vertex[segments-1];
for (int ns = 1; ns < segments; ns++)
np[ns-1] = mesh.createVertex(xs[0]+ns*(xe[0]-xs[0])/segments, xs[1]+ns*(xe[1]-xs[1])/segments);
Vertex2D last = start;
int nrNodes = 0;
l = 0.0;
for (int ns = 0; ns < segments-1; ns++)
{
l = mesh.compGeom().distance(last, (Vertex2D) np[ns]);
if (l > lcrit)
{
last = (Vertex2D) np[ns];
triNodes.add(last);
l = 0.0;
nrNodes++;
}
}
if (nrNodes > nrTriNodes)
{
nrTriNodes = nrNodes;
}
else if (nrNodes == 0)
{
ot.setAttributes(AbstractHalfEdge.MARKED);
}
checked++;
}
if (nrTriNodes > maxNodes)
maxNodes = nrTriNodes;
if (!triNodes.isEmpty())
{
// Process in pseudo-random order
int prime = PrimeFinder.nextPrime(nrTriNodes);
int imax = triNodes.size();
while (imax % prime == 0)
prime = PrimeFinder.nextPrime(prime+1);
if (prime >= imax)
prime = 1;
int index = imax / 2;
for (int i = 0; i < imax; i++)
{
Vertex2D v = triNodes.get(index);
Vertex2D n = (Vertex2D) mesh.getQuadTree().getNearestVertex(mesh, v);
assert n == mesh.getQuadTree().getNearestVertexDebug(mesh, v);
if (mesh.compGeom().distance(v, n) > minlen)
{
mesh.getQuadTree().add(v);
nodes.add(v);
}
else
tooNearNodes++;
index += prime;
if (index >= imax)
index -= imax;
}
}
}
// Try to insert triangle centroid after all other points.
Vertex2D c = null;
for (Iterator<Triangle> it = mesh.getTriangles().iterator(); it.hasNext(); )
{
TriangleVH t = (TriangleVH) it.next();
if (t.hasAttributes(AbstractHalfEdge.OUTER))
continue;
if (c == null)
c = (Vertex2D) mesh.createVertex(0.0, 0.0);
c.centroid((Vertex2D[]) t.vertex);
Vertex2D n = (Vertex2D) mesh.getQuadTree().getNearestVertex(mesh, c);
assert n == mesh.getQuadTree().getNearestVertexDebug(mesh, c);
if (mesh.compGeom().distance(c, n) > minlen)
{
mesh.getQuadTree().add(c);
nodes.add(c);
c = null;
}
else
tooNearNodes++;
}
if (nodes.isEmpty())
break;
for (Iterator<Vertex2D> it = nodes.iterator(); it.hasNext(); )
{
Vertex2D v = it.next();
// These vertices are not bound to any triangles, so
// they must be removed, otherwise getSurroundingOTriangle
// may return a null pointer.
mesh.getQuadTree().remove(v);
}
// Process in pseudo-random order. There is at most maxNodes nodes
// on an edge, we choose an increment step greater than this value
// to try to split all edges.
int prime = PrimeFinder.nextPrime(maxNodes);
int imax = nodes.size();
while (imax % prime == 0)
prime = PrimeFinder.nextPrime(prime+1);
if (prime >= imax)
prime = 1;
int index = imax / 2;
int skippedNodes = 0;
for (int i = 0; i < imax; i++)
{
Vertex2D v = nodes.get(index);
VirtualHalfEdge2D vt = v.getSurroundingOTriangle(mesh);
if (!vt.split3(mesh, v, false))
skippedNodes++;
index += prime;
if (index >= imax)
index -= imax;
}
if (logger.isDebugEnabled())
{
logger.debug("Mesh now contains "+mesh.getTriangles().size()+" triangles");
if (checked > 0)
logger.debug(checked+" edges checked");
if (imax - skippedNodes > 0)
logger.debug((imax-skippedNodes)+" nodes added");
if (tooNearNodes > 0)
logger.debug(tooNearNodes+" nodes are too near from existing vertices and cannot be inserted");
if (skippedNodes > 0)
logger.debug(skippedNodes+" nodes cannot be inserted");
if (kdtreeSplit > 0)
logger.debug(kdtreeSplit+" quadtree cells split");
}
if (skippedNodes == nodes.size())
break;
}
logger.debug("Number of iterations to insert all nodes: "+nrIter);
}
// Fixed variant: iteratively inserts inner nodes until no new vertex can be
// added. Per-iteration statistics counters are declared inside the loop so
// the debug report at the end of each pass is accurate.
| public void compute()
{
	int nrIter = 0;
	logger.debug(" Insert inner nodes");
	ArrayList<Vertex2D> nodes = new ArrayList<Vertex2D>();
	ArrayList<Vertex2D> triNodes = new ArrayList<Vertex2D>();
	VirtualHalfEdge2D sym = new VirtualHalfEdge2D();
	VirtualHalfEdge2D ot = new VirtualHalfEdge2D();
	// We do not want to split boundary edges.
	for(Iterator<Triangle> it = mesh.getTriangles().iterator(); it.hasNext(); )
	{
		TriangleVH t = (TriangleVH) it.next();
		if (t.hasAttributes(AbstractHalfEdge.OUTER))
			continue;
		ot.bind(t);
		for (int i = 0; i < 3; i++)
		{
			ot.next();
			if (ot.hasAttributes(AbstractHalfEdge.BOUNDARY))
				ot.setAttributes(AbstractHalfEdge.MARKED);
			else
				ot.clearAttributes(AbstractHalfEdge.MARKED);
		}
	}
	// We try to insert new nodes by splitting large edges. As edge collapse
	// is costful, nodes are inserted only if it does not create small edges,
	// which means that nodes are not deleted.
	// We iterate over all edges, and put candidate nodes into triNodes.
	// If an edge has no candidates, either because it is small or because no
	// nodes can be inserted, it is tagged and will not have to be checked
	// during next iterations.
	while (true)
	{
		nrIter++;
		// Maximal number of nodes which are inserted on an edge
		int maxNodes = 0;
		// Number of checked edges
		int checked = 0;
		// Reset on each pass: reported per iteration by the debug block below.
		// Number of nodes which are too near from existing vertices
		int tooNearNodes = 0;
		// Number of quadtree cells split
		int kdtreeSplit = 0;
		nodes.clear();
		for(Iterator<Triangle> it = mesh.getTriangles().iterator(); it.hasNext(); )
		{
			TriangleVH t = (TriangleVH) it.next();
			if (t.hasAttributes(AbstractHalfEdge.OUTER))
				continue;
			ot.bind(t);
			triNodes.clear();
			// Maximal number of nodes which are inserted on edges of this triangle
			int nrTriNodes = 0;
			for (int i = 0; i < 3; i++)
			{
				ot.next();
				if (ot.hasAttributes(AbstractHalfEdge.MARKED))
				{
					// This edge has already been checked and cannot be split
					continue;
				}
				sym.bind((TriangleVH) ot.getTri(), ot.getLocalNumber());
				sym.sym();
				if (sym.hasAttributes(AbstractHalfEdge.MARKED))
				{
					// This edge has already been checked and cannot be split
					continue;
				}
				double l = mesh.compGeom().length(ot);
				if (l < maxlen)
				{
					// This edge is smaller than target size and is not split
					ot.setAttributes(AbstractHalfEdge.MARKED);
					sym.setAttributes(AbstractHalfEdge.MARKED);
					continue;
				}
				// Tag symmetric edge so that edges are checked only once
				sym.setAttributes(AbstractHalfEdge.MARKED);
				// Long edges are discretized, but do not create more than 4 subsegments
				double lcrit = 1.0;
				if (l > 4.0)
					lcrit = l / 4.0;
				Vertex2D start = (Vertex2D) ot.origin();
				Vertex2D end = (Vertex2D) ot.destination();
				double [] xs = start.getUV();
				double [] xe = end.getUV();
				int segments = (int) (2.0*l/lcrit) + 10;
				Vertex [] np = new Vertex[segments-1];
				for (int ns = 1; ns < segments; ns++)
					np[ns-1] = mesh.createVertex(xs[0]+ns*(xe[0]-xs[0])/segments, xs[1]+ns*(xe[1]-xs[1])/segments);
				Vertex2D last = start;
				int nrNodes = 0;
				l = 0.0;
				for (int ns = 0; ns < segments-1; ns++)
				{
					l = mesh.compGeom().distance(last, (Vertex2D) np[ns]);
					if (l > lcrit)
					{
						last = (Vertex2D) np[ns];
						triNodes.add(last);
						l = 0.0;
						nrNodes++;
					}
				}
				if (nrNodes > nrTriNodes)
				{
					nrTriNodes = nrNodes;
				}
				else if (nrNodes == 0)
				{
					ot.setAttributes(AbstractHalfEdge.MARKED);
				}
				checked++;
			}
			if (nrTriNodes > maxNodes)
				maxNodes = nrTriNodes;
			if (!triNodes.isEmpty())
			{
				// Process in pseudo-random order
				int prime = PrimeFinder.nextPrime(nrTriNodes);
				int imax = triNodes.size();
				while (imax % prime == 0)
					prime = PrimeFinder.nextPrime(prime+1);
				if (prime >= imax)
					prime = 1;
				int index = imax / 2;
				for (int i = 0; i < imax; i++)
				{
					Vertex2D v = triNodes.get(index);
					Vertex2D n = (Vertex2D) mesh.getQuadTree().getNearestVertex(mesh, v);
					assert n == mesh.getQuadTree().getNearestVertexDebug(mesh, v);
					if (mesh.compGeom().distance(v, n) > minlen)
					{
						mesh.getQuadTree().add(v);
						nodes.add(v);
					}
					else
						tooNearNodes++;
					index += prime;
					if (index >= imax)
						index -= imax;
				}
			}
		}
		// Try to insert triangle centroid after all other points.
		Vertex2D c = null;
		for (Iterator<Triangle> it = mesh.getTriangles().iterator(); it.hasNext(); )
		{
			TriangleVH t = (TriangleVH) it.next();
			if (t.hasAttributes(AbstractHalfEdge.OUTER))
				continue;
			if (c == null)
				c = (Vertex2D) mesh.createVertex(0.0, 0.0);
			c.centroid((Vertex2D[]) t.vertex);
			Vertex2D n = (Vertex2D) mesh.getQuadTree().getNearestVertex(mesh, c);
			assert n == mesh.getQuadTree().getNearestVertexDebug(mesh, c);
			if (mesh.compGeom().distance(c, n) > minlen)
			{
				mesh.getQuadTree().add(c);
				nodes.add(c);
				c = null;
			}
			else
				tooNearNodes++;
		}
		if (nodes.isEmpty())
			break;
		for (Iterator<Vertex2D> it = nodes.iterator(); it.hasNext(); )
		{
			Vertex2D v = it.next();
			// These vertices are not bound to any triangles, so
			// they must be removed, otherwise getSurroundingOTriangle
			// may return a null pointer.
			mesh.getQuadTree().remove(v);
		}
		// Process in pseudo-random order. There is at most maxNodes nodes
		// on an edge, we choose an increment step greater than this value
		// to try to split all edges.
		int prime = PrimeFinder.nextPrime(maxNodes);
		int imax = nodes.size();
		while (imax % prime == 0)
			prime = PrimeFinder.nextPrime(prime+1);
		if (prime >= imax)
			prime = 1;
		int index = imax / 2;
		int skippedNodes = 0;
		for (int i = 0; i < imax; i++)
		{
			Vertex2D v = nodes.get(index);
			VirtualHalfEdge2D vt = v.getSurroundingOTriangle(mesh);
			if (!vt.split3(mesh, v, false))
				skippedNodes++;
			index += prime;
			if (index >= imax)
				index -= imax;
		}
		if (logger.isDebugEnabled())
		{
			logger.debug("Mesh now contains "+mesh.getTriangles().size()+" triangles");
			if (checked > 0)
				logger.debug(checked+" edges checked");
			if (imax - skippedNodes > 0)
				logger.debug((imax-skippedNodes)+" nodes added");
			if (tooNearNodes > 0)
				logger.debug(tooNearNodes+" nodes are too near from existing vertices and cannot be inserted");
			if (skippedNodes > 0)
				logger.debug(skippedNodes+" nodes cannot be inserted");
			if (kdtreeSplit > 0)
				logger.debug(kdtreeSplit+" quadtree cells split");
		}
		if (skippedNodes == nodes.size())
			break;
	}
	logger.debug("Number of iterations to insert all nodes: "+nrIter);
}
|
diff --git a/dao-hibernate/src/main/java/org/apache/ode/daohib/bpel/CriteriaBuilder.java b/dao-hibernate/src/main/java/org/apache/ode/daohib/bpel/CriteriaBuilder.java
index 9a0df9d8b..fcf3bb8fe 100644
--- a/dao-hibernate/src/main/java/org/apache/ode/daohib/bpel/CriteriaBuilder.java
+++ b/dao-hibernate/src/main/java/org/apache/ode/daohib/bpel/CriteriaBuilder.java
@@ -1,409 +1,409 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.ode.daohib.bpel;
import java.sql.Timestamp;
import java.text.ParseException;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.ode.bpel.common.BpelEventFilter;
import org.apache.ode.bpel.common.Filter;
import org.apache.ode.bpel.common.InstanceFilter;
import org.apache.ode.utils.ISO8601DateParser;
import org.hibernate.Criteria;
import org.hibernate.Query;
import org.hibernate.Session;
import org.hibernate.criterion.Disjunction;
import org.hibernate.criterion.Property;
import org.hibernate.criterion.Restrictions;
/**
* Class used for converting "filter" objects into Hibernate
* {@link org.hibernate.Criteria} objects.
*/
class CriteriaBuilder {
static final Log __log = LogFactory.getLog(CriteriaBuilder.class);
/**
* Build a HQL query from an instance filter.
* @param filter filter
*/
Query buildHQLQuery(Session session, InstanceFilter filter) {
Map<String, Object> parameters = new HashMap<String, Object>();
StringBuffer query = new StringBuffer();
query.append("select pi from HProcessInstance as pi left join fetch pi.fault ");
if (filter != null) {
// Building each clause
ArrayList<String> clauses = new ArrayList<String>();
// iid filter
if ( filter.getIidFilter() != null ) {
StringBuffer filters = new StringBuffer();
List<String> iids = filter.getIidFilter();
for (int m = 0; m < iids.size(); m++) {
filters.append(" pi.id = :iid").append(m);
parameters.put("iid" + m, iids.get(m));
if (m < iids.size() - 1) filters.append(" or");
}
clauses.add(" (" + filters + ")");
}
// pid filter
if (filter.getPidFilter() != null) {
StringBuffer filters = new StringBuffer();
List<String> pids = filter.getPidFilter();
for (int m = 0; m < pids.size(); m++) {
- filters.append(" pi.process.id =").append(" :pid").append(m);
+ filters.append(" pi.process.processId =").append(" :pid").append(m);
parameters.put("pid" + m, pids.get(m));
if (m < pids.size() - 1) filters.append(" or");
}
clauses.add(" (" + filters + ")");
}
// name filter
if (filter.getNameFilter() != null) {
clauses.add(" pi.process.typeName like :pname");
parameters.put("pname", filter.getNameFilter().replaceAll("\\*", "%"));
}
// name space filter
if (filter.getNamespaceFilter() != null) {
clauses.add(" pi.process.typeNamespace like :pnamespace");
parameters.put("pnamespace", filter.getNamespaceFilter().replaceAll("\\*", "%"));
}
// started filter
if (filter.getStartedDateFilter() != null) {
for ( String ds : filter.getStartedDateFilter() ) {
// named parameters not needed as date is parsed and is hence not
// prone to HQL injections
clauses.add(" pi.created " + dateFilter(ds));
}
}
// last-active filter
if (filter.getLastActiveDateFilter() != null) {
for ( String ds : filter.getLastActiveDateFilter() ) {
// named parameters not needed as date is parsed and is hence not
// prone to HQL injections
clauses.add(" pi.lastActiveTime " + dateFilter(ds));
}
}
// status filter
if (filter.getStatusFilter() != null) {
StringBuffer filters = new StringBuffer();
List<Short> states = filter.convertFilterState();
for (int m = 0; m < states.size(); m++) {
filters.append(" pi.state = :pstate").append(m);
parameters.put("pstate" + m, states.get(m));
if (m < states.size() - 1) filters.append(" or");
}
clauses.add(" (" + filters.toString() + ")");
}
// $property filter
if (filter.getPropertyValuesFilter() != null) {
Map<String,String> props = filter.getPropertyValuesFilter();
// join to correlation sets
query.append(" inner join pi.correlationSets as cs");
int i = 0;
for (String propKey : props.keySet()) {
i++;
// join to props for each prop
query.append(" inner join cs.properties as csp"+i);
// add clause for prop key and value
// spaces have to be escaped, might be better handled in InstanceFilter
String value = props.get(propKey).replaceAll(" ", " ");
if (propKey.startsWith("{")) {
String namespace = propKey.substring(1, propKey.lastIndexOf("}"));
clauses.add(" csp" + i + ".name = :cspname" + i +
" and csp" + i + ".namespace = :cspnamespace" + i +
" and csp" + i + ".value = :cspvalue" + i);
parameters.put("cspname" + i, propKey.substring(propKey.lastIndexOf("}") + 1, propKey.length()));
parameters.put("cspnamespace" + i, namespace);
parameters.put("cspvalue" + i, value);
} else {
clauses.add(" csp" + i + ".name = :cspname" + i +
" and csp" + i + ".value = :cspvalue" + i);
parameters.put("cspname" + i, propKey);
parameters.put("cspvalue" + i, value);
}
}
}
// order by
StringBuffer orderby = new StringBuffer("");
if (filter.getOrders() != null) {
orderby.append(" order by");
List<String> orders = filter.getOrders();
for (int m = 0; m < orders.size(); m++) {
String field = orders.get(m);
String ord = " asc";
if (field.startsWith("-")) {
ord = " desc";
}
String fieldName = " pi.id";
if (field.endsWith("name")) {
fieldName = " pi.process.typeName";
}
if (field.endsWith("namespace")) {
fieldName = " pi.process.typeNamespace";
}
if ( field.endsWith("version")) {
fieldName = " pi.process.version";
}
if ( field.endsWith("status")) {
fieldName = " pi.state";
}
if ( field.endsWith("started")) {
fieldName = " pi.created";
}
if ( field.endsWith("last-active")) {
fieldName = " pi.lastActiveTime";
}
orderby.append(fieldName + ord);
if (m < orders.size() - 1) orderby.append(", ");
}
}
// Preparing the statement
if (clauses.size() > 0) {
query.append(" where");
for (int m = 0; m < clauses.size(); m++) {
query.append(clauses.get(m));
if (m < clauses.size() - 1) query.append(" and");
}
}
query.append(orderby);
}
if (__log.isDebugEnabled()) {
__log.debug(query.toString());
}
Query q = session.createQuery(query.toString());
for (String p : parameters.keySet()) {
q.setParameter(p, parameters.get(p));
}
if (filter.getLimit() != 0) {
q.setMaxResults(filter.getLimit());
}
return q;
}
private static String dateFilter(String filter) {
String date = Filter.getDateWithoutOp(filter);
String op = filter.substring(0,filter.indexOf(date));
Date dt = null;
try {
dt = ISO8601DateParser.parse(date);
} catch (ParseException e) {
e.printStackTrace();
}
Timestamp ts = new Timestamp(dt.getTime());
return op + " '" + ts.toString() + "'";
}
/**
* Build a Hibernate {@link Criteria} from an instance filter.
* @param crit target (destination) criteria
* @param filter filter
*/
void buildCriteria(Criteria crit, InstanceFilter filter) {
Criteria processCrit = crit.createCriteria("process");
// Filtering on PID
List<String> pids = filter.getPidFilter();
if (pids != null && pids.size() > 0) {
Disjunction disj = Restrictions.disjunction();
for (String pid: pids) {
disj.add(Restrictions.eq("processId", pid));
}
processCrit.add(disj);
}
List<String> iids = filter.getIidFilter();
if (iids != null && iids.size() > 0) {
Disjunction disj = Restrictions.disjunction();
for (String iid: iids) {
disj.add(Restrictions.eq("id", new Long(iid)));
}
crit.add(disj);
}
// Filtering on name and namespace
if (filter.getNameFilter() != null) {
processCrit.add(Restrictions.like("typeName", filter.getNameFilter().replaceAll("\\*", "%")));
}
if (filter.getNamespaceFilter() != null) {
processCrit.add(Restrictions.like("typeNamespace", filter.getNamespaceFilter().replaceAll("\\*", "%")));
}
// Specific filter for status (using a disjunction between possible statuses)
if (filter.getStatusFilter() != null) {
List<Short> statuses = filter.convertFilterState();
Disjunction disj = Restrictions.disjunction();
for (short status : statuses) {
disj.add(Restrictions.eq("state", status));
}
crit.add(disj);
}
// Specific filter for started and last active dates.
if (filter.getStartedDateFilter() != null) {
for (String sdf : filter.getStartedDateFilter()) {
addFilterOnPrefixedDate(crit, sdf, "created");
}
}
if (filter.getLastActiveDateFilter() != null) {
for (String ladf : filter.getLastActiveDateFilter()) {
addFilterOnPrefixedDate(crit, ladf, "lastActiveTime");
}
}
// Specific filter for correlation properties
if (filter.getPropertyValuesFilter() != null) {
Criteria propCrit = crit.createCriteria("correlationSets").createCriteria("properties");
for (Map.Entry<String, String> corValue : filter.getPropertyValuesFilter().entrySet()) {
String propName = (String)corValue.getKey();
if (propName.startsWith("{")) {
String namespace = propName.substring(1, propName.lastIndexOf("}"));
propName = propName.substring(propName.lastIndexOf("}") + 1, propName.length());
propCrit.add(Restrictions.eq("name", propName))
.add(Restrictions.eq("namespace", namespace))
.add(Restrictions.eq("value", corValue.getValue()));
} else {
propCrit.add(Restrictions.eq("name", corValue.getKey()))
.add(Restrictions.eq("value", corValue.getValue()));
}
}
}
// Ordering
if (filter.orders != null) {
for (String key : filter.orders) {
boolean ascending = true;
String orderKey = key;
if (key.startsWith("+") || key.startsWith("-")) {
orderKey = key.substring(1, key.length());
if (key.startsWith("-")) ascending = false;
}
if ("name".equals(orderKey)) {
if (ascending) processCrit.addOrder(Property.forName("typeName").asc());
else processCrit.addOrder(Property.forName("typeName").desc());
} else if ("namespace".equals(orderKey)) {
if (ascending) processCrit.addOrder(Property.forName("typeNamespace").asc());
else processCrit.addOrder(Property.forName("typeNamespace").desc());
} else if ("pid".equals(orderKey)) {
if (ascending) processCrit.addOrder(Property.forName("processId").asc());
else processCrit.addOrder(Property.forName("processId").desc());
} else if ("version".equals(orderKey)) {
if (ascending) processCrit.addOrder(Property.forName("version").asc());
else processCrit.addOrder(Property.forName("version").desc());
} else if ("status".equals(orderKey)) {
if (ascending) crit.addOrder(Property.forName("state").asc());
else crit.addOrder(Property.forName("state").desc());
} else if ("started".equals(orderKey)) {
if (ascending) crit.addOrder(Property.forName("created").asc());
else crit.addOrder(Property.forName("created").desc());
} else if ("last-active".equals(orderKey)) {
if (ascending) crit.addOrder(Property.forName("lastActiveTime").asc());
else crit.addOrder(Property.forName("lastActiveTime").desc());
}
}
}
if (filter.getLimit() > 0) crit.setMaxResults(filter.getLimit());
}
/**
* Build criteria for an event filter.
* @param crit target criteria
* @param efilter event filter
*/
void buildCriteria(Criteria crit, BpelEventFilter efilter) {
if (efilter.getTypeFilter() != null)
crit.add(Restrictions.like("type", efilter.getTypeFilter().replace('*','%')));
// Specific filter for started and last active dates.
if (efilter.getTimestampFilter() != null) {
for (Filter.Restriction<Date> sdf : efilter.getTimestampFilter()) {
addFilterOnPrefixedDate(crit, sdf.op, sdf.value, "tstamp");
}
}
if (efilter.limit > 0) crit.setMaxResults(efilter.limit);
}
void addScopeFilter(Criteria crit, String scopeId) {
crit.add(Restrictions.eq("",scopeId));
}
static void addFilterOnPrefixedDate(Criteria crit, String prefixedDate, String dateAttribute) {
Date realDate = null;
try {
realDate = ISO8601DateParser.parse(getDateWithoutOp(prefixedDate));
} catch (ParseException e) {
// Never occurs, the deploy date format is pre-validated by the filter
}
addFilterOnPrefixedDate(crit,prefixedDate,realDate,dateAttribute);
}
static void addFilterOnPrefixedDate(Criteria crit, String op, Date date, String dateAttribute) {
if (op.startsWith("=")) {
crit.add(Restrictions.eq(dateAttribute, date));
} else if (op.startsWith("<=")) {
crit.add(Restrictions.le(dateAttribute, date));
} else if (op.startsWith(">=")) {
crit.add(Restrictions.ge(dateAttribute, date));
} else if (op.startsWith("<")) {
crit.add(Restrictions.lt(dateAttribute, date));
} else if (op.startsWith(">")) {
crit.add(Restrictions.gt(dateAttribute, date));
}
}
private static String getDateWithoutOp(String ddf) {
return Filter.getDateWithoutOp(ddf);
}
}
| true | true | Query buildHQLQuery(Session session, InstanceFilter filter) {
Map<String, Object> parameters = new HashMap<String, Object>();
StringBuffer query = new StringBuffer();
query.append("select pi from HProcessInstance as pi left join fetch pi.fault ");
if (filter != null) {
// Building each clause
ArrayList<String> clauses = new ArrayList<String>();
// iid filter
if ( filter.getIidFilter() != null ) {
StringBuffer filters = new StringBuffer();
List<String> iids = filter.getIidFilter();
for (int m = 0; m < iids.size(); m++) {
filters.append(" pi.id = :iid").append(m);
parameters.put("iid" + m, iids.get(m));
if (m < iids.size() - 1) filters.append(" or");
}
clauses.add(" (" + filters + ")");
}
// pid filter
if (filter.getPidFilter() != null) {
StringBuffer filters = new StringBuffer();
List<String> pids = filter.getPidFilter();
for (int m = 0; m < pids.size(); m++) {
filters.append(" pi.process.id =").append(" :pid").append(m);
parameters.put("pid" + m, pids.get(m));
if (m < pids.size() - 1) filters.append(" or");
}
clauses.add(" (" + filters + ")");
}
// name filter
if (filter.getNameFilter() != null) {
clauses.add(" pi.process.typeName like :pname");
parameters.put("pname", filter.getNameFilter().replaceAll("\\*", "%"));
}
// name space filter
if (filter.getNamespaceFilter() != null) {
clauses.add(" pi.process.typeNamespace like :pnamespace");
parameters.put("pnamespace", filter.getNamespaceFilter().replaceAll("\\*", "%"));
}
// started filter
if (filter.getStartedDateFilter() != null) {
for ( String ds : filter.getStartedDateFilter() ) {
// named parameters not needed as date is parsed and is hence not
// prone to HQL injections
clauses.add(" pi.created " + dateFilter(ds));
}
}
// last-active filter
if (filter.getLastActiveDateFilter() != null) {
for ( String ds : filter.getLastActiveDateFilter() ) {
// named parameters not needed as date is parsed and is hence not
// prone to HQL injections
clauses.add(" pi.lastActiveTime " + dateFilter(ds));
}
}
// status filter
if (filter.getStatusFilter() != null) {
StringBuffer filters = new StringBuffer();
List<Short> states = filter.convertFilterState();
for (int m = 0; m < states.size(); m++) {
filters.append(" pi.state = :pstate").append(m);
parameters.put("pstate" + m, states.get(m));
if (m < states.size() - 1) filters.append(" or");
}
clauses.add(" (" + filters.toString() + ")");
}
// $property filter
if (filter.getPropertyValuesFilter() != null) {
Map<String,String> props = filter.getPropertyValuesFilter();
// join to correlation sets
query.append(" inner join pi.correlationSets as cs");
int i = 0;
for (String propKey : props.keySet()) {
i++;
// join to props for each prop
query.append(" inner join cs.properties as csp"+i);
// add clause for prop key and value
// spaces have to be escaped, might be better handled in InstanceFilter
String value = props.get(propKey).replaceAll(" ", " ");
if (propKey.startsWith("{")) {
String namespace = propKey.substring(1, propKey.lastIndexOf("}"));
clauses.add(" csp" + i + ".name = :cspname" + i +
" and csp" + i + ".namespace = :cspnamespace" + i +
" and csp" + i + ".value = :cspvalue" + i);
parameters.put("cspname" + i, propKey.substring(propKey.lastIndexOf("}") + 1, propKey.length()));
parameters.put("cspnamespace" + i, namespace);
parameters.put("cspvalue" + i, value);
} else {
clauses.add(" csp" + i + ".name = :cspname" + i +
" and csp" + i + ".value = :cspvalue" + i);
parameters.put("cspname" + i, propKey);
parameters.put("cspvalue" + i, value);
}
}
}
// order by
StringBuffer orderby = new StringBuffer("");
if (filter.getOrders() != null) {
orderby.append(" order by");
List<String> orders = filter.getOrders();
for (int m = 0; m < orders.size(); m++) {
String field = orders.get(m);
String ord = " asc";
if (field.startsWith("-")) {
ord = " desc";
}
String fieldName = " pi.id";
if (field.endsWith("name")) {
fieldName = " pi.process.typeName";
}
if (field.endsWith("namespace")) {
fieldName = " pi.process.typeNamespace";
}
if ( field.endsWith("version")) {
fieldName = " pi.process.version";
}
if ( field.endsWith("status")) {
fieldName = " pi.state";
}
if ( field.endsWith("started")) {
fieldName = " pi.created";
}
if ( field.endsWith("last-active")) {
fieldName = " pi.lastActiveTime";
}
orderby.append(fieldName + ord);
if (m < orders.size() - 1) orderby.append(", ");
}
}
// Preparing the statement
if (clauses.size() > 0) {
query.append(" where");
for (int m = 0; m < clauses.size(); m++) {
query.append(clauses.get(m));
if (m < clauses.size() - 1) query.append(" and");
}
}
query.append(orderby);
}
if (__log.isDebugEnabled()) {
__log.debug(query.toString());
}
Query q = session.createQuery(query.toString());
for (String p : parameters.keySet()) {
q.setParameter(p, parameters.get(p));
}
if (filter.getLimit() != 0) {
q.setMaxResults(filter.getLimit());
}
return q;
}
| Query buildHQLQuery(Session session, InstanceFilter filter) {
Map<String, Object> parameters = new HashMap<String, Object>();
StringBuffer query = new StringBuffer();
query.append("select pi from HProcessInstance as pi left join fetch pi.fault ");
if (filter != null) {
// Building each clause
ArrayList<String> clauses = new ArrayList<String>();
// iid filter
if ( filter.getIidFilter() != null ) {
StringBuffer filters = new StringBuffer();
List<String> iids = filter.getIidFilter();
for (int m = 0; m < iids.size(); m++) {
filters.append(" pi.id = :iid").append(m);
parameters.put("iid" + m, iids.get(m));
if (m < iids.size() - 1) filters.append(" or");
}
clauses.add(" (" + filters + ")");
}
// pid filter
if (filter.getPidFilter() != null) {
StringBuffer filters = new StringBuffer();
List<String> pids = filter.getPidFilter();
for (int m = 0; m < pids.size(); m++) {
filters.append(" pi.process.processId =").append(" :pid").append(m);
parameters.put("pid" + m, pids.get(m));
if (m < pids.size() - 1) filters.append(" or");
}
clauses.add(" (" + filters + ")");
}
// name filter
if (filter.getNameFilter() != null) {
clauses.add(" pi.process.typeName like :pname");
parameters.put("pname", filter.getNameFilter().replaceAll("\\*", "%"));
}
// name space filter
if (filter.getNamespaceFilter() != null) {
clauses.add(" pi.process.typeNamespace like :pnamespace");
parameters.put("pnamespace", filter.getNamespaceFilter().replaceAll("\\*", "%"));
}
// started filter
if (filter.getStartedDateFilter() != null) {
for ( String ds : filter.getStartedDateFilter() ) {
// named parameters not needed as date is parsed and is hence not
// prone to HQL injections
clauses.add(" pi.created " + dateFilter(ds));
}
}
// last-active filter
if (filter.getLastActiveDateFilter() != null) {
for ( String ds : filter.getLastActiveDateFilter() ) {
// named parameters not needed as date is parsed and is hence not
// prone to HQL injections
clauses.add(" pi.lastActiveTime " + dateFilter(ds));
}
}
// status filter
if (filter.getStatusFilter() != null) {
StringBuffer filters = new StringBuffer();
List<Short> states = filter.convertFilterState();
for (int m = 0; m < states.size(); m++) {
filters.append(" pi.state = :pstate").append(m);
parameters.put("pstate" + m, states.get(m));
if (m < states.size() - 1) filters.append(" or");
}
clauses.add(" (" + filters.toString() + ")");
}
// $property filter
if (filter.getPropertyValuesFilter() != null) {
Map<String,String> props = filter.getPropertyValuesFilter();
// join to correlation sets
query.append(" inner join pi.correlationSets as cs");
int i = 0;
for (String propKey : props.keySet()) {
i++;
// join to props for each prop
query.append(" inner join cs.properties as csp"+i);
// add clause for prop key and value
// spaces have to be escaped, might be better handled in InstanceFilter
String value = props.get(propKey).replaceAll(" ", " ");
if (propKey.startsWith("{")) {
String namespace = propKey.substring(1, propKey.lastIndexOf("}"));
clauses.add(" csp" + i + ".name = :cspname" + i +
" and csp" + i + ".namespace = :cspnamespace" + i +
" and csp" + i + ".value = :cspvalue" + i);
parameters.put("cspname" + i, propKey.substring(propKey.lastIndexOf("}") + 1, propKey.length()));
parameters.put("cspnamespace" + i, namespace);
parameters.put("cspvalue" + i, value);
} else {
clauses.add(" csp" + i + ".name = :cspname" + i +
" and csp" + i + ".value = :cspvalue" + i);
parameters.put("cspname" + i, propKey);
parameters.put("cspvalue" + i, value);
}
}
}
// order by
StringBuffer orderby = new StringBuffer("");
if (filter.getOrders() != null) {
orderby.append(" order by");
List<String> orders = filter.getOrders();
for (int m = 0; m < orders.size(); m++) {
String field = orders.get(m);
String ord = " asc";
if (field.startsWith("-")) {
ord = " desc";
}
String fieldName = " pi.id";
if (field.endsWith("name")) {
fieldName = " pi.process.typeName";
}
if (field.endsWith("namespace")) {
fieldName = " pi.process.typeNamespace";
}
if ( field.endsWith("version")) {
fieldName = " pi.process.version";
}
if ( field.endsWith("status")) {
fieldName = " pi.state";
}
if ( field.endsWith("started")) {
fieldName = " pi.created";
}
if ( field.endsWith("last-active")) {
fieldName = " pi.lastActiveTime";
}
orderby.append(fieldName + ord);
if (m < orders.size() - 1) orderby.append(", ");
}
}
// Preparing the statement
if (clauses.size() > 0) {
query.append(" where");
for (int m = 0; m < clauses.size(); m++) {
query.append(clauses.get(m));
if (m < clauses.size() - 1) query.append(" and");
}
}
query.append(orderby);
}
if (__log.isDebugEnabled()) {
__log.debug(query.toString());
}
Query q = session.createQuery(query.toString());
for (String p : parameters.keySet()) {
q.setParameter(p, parameters.get(p));
}
if (filter.getLimit() != 0) {
q.setMaxResults(filter.getLimit());
}
return q;
}
|
diff --git a/sql12/fw/src/net/sourceforge/squirrel_sql/fw/dialects/InformixDialect.java b/sql12/fw/src/net/sourceforge/squirrel_sql/fw/dialects/InformixDialect.java
index 1f8af383d..5509e058f 100644
--- a/sql12/fw/src/net/sourceforge/squirrel_sql/fw/dialects/InformixDialect.java
+++ b/sql12/fw/src/net/sourceforge/squirrel_sql/fw/dialects/InformixDialect.java
@@ -1,414 +1,414 @@
/*
* Copyright (C) 2006 Rob Manning
* [email protected]
*
* This program is free software; you can redistribute it and/or
* modify it under the terms of the GNU General Public License
* as published by the Free Software Foundation; either version 2
* of the License, or any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
*/
package net.sourceforge.squirrel_sql.fw.dialects;
import java.sql.Types;
import net.sourceforge.squirrel_sql.fw.sql.IDatabaseObjectInfo;
import net.sourceforge.squirrel_sql.fw.sql.TableColumnInfo;
/**
* An extension to the standard Hibernate Informix dialect
*
*/
public class InformixDialect extends org.hibernate.dialect.InformixDialect
implements HibernateDialect {
public InformixDialect() {
super();
registerColumnType(Types.BIGINT, "integer");
registerColumnType(Types.BINARY, "byte");
registerColumnType(Types.BIT, "byte");
registerColumnType(Types.BLOB, "byte");
registerColumnType(Types.BOOLEAN, "smallint");
registerColumnType(Types.CHAR, 32511, "char($l)");
registerColumnType(Types.CHAR, "char(32511)");
registerColumnType(Types.CLOB, "text");
registerColumnType(Types.DATE, "date");
registerColumnType(Types.DECIMAL, "decimal($p,$s)");
registerColumnType(Types.DOUBLE, 15, "float($l)");
registerColumnType(Types.DOUBLE, "float(15)");
registerColumnType(Types.FLOAT, 15, "float($l)");
registerColumnType(Types.FLOAT, "float(15)");
registerColumnType(Types.INTEGER, "integer");
registerColumnType(Types.LONGVARBINARY, "byte");
registerColumnType(Types.LONGVARCHAR, "text");
registerColumnType(Types.NUMERIC, "numeric($p,$s)");
registerColumnType(Types.REAL, "real");
registerColumnType(Types.SMALLINT, "smallint");
registerColumnType(Types.TIME, "datetime hour to second");
- registerColumnType(Types.TIMESTAMP, "datetime");
+ registerColumnType(Types.TIMESTAMP, "datetime year to fraction");
registerColumnType(Types.TINYINT, "smallint");
registerColumnType(Types.VARBINARY, "byte");
registerColumnType(Types.VARCHAR, 255, "varchar($l)");
registerColumnType(Types.VARCHAR, "text");
}
/*
* (non-Javadoc)
*
* @see net.sourceforge.squirrel_sql.plugins.dbcopy.dialects.HibernateDialect#canPasteTo(net.sourceforge.squirrel_sql.fw.sql.DatabaseObjectType)
*/
public boolean canPasteTo(IDatabaseObjectInfo info) {
return true;
}
/*
* (non-Javadoc)
*
* @see net.sourceforge.squirrel_sql.plugins.dbcopy.dialects.HibernateDialect#supportsSchemasInTableDefinition()
*/
public boolean supportsSchemasInTableDefinition() {
return true;
}
/*
* (non-Javadoc)
*
* @see net.sourceforge.squirrel_sql.plugins.dbcopy.dialects.HibernateDialect#getLengthFunction()
*/
public String getLengthFunction(int dataType) {
return "length";
}
/*
* (non-Javadoc)
*
* @see net.sourceforge.squirrel_sql.plugins.dbcopy.dialects.HibernateDialect#getMaxFunction()
*/
public String getMaxFunction() {
return "max";
}
/*
* (non-Javadoc)
*
* @see net.sourceforge.squirrel_sql.plugins.dbcopy.dialects.HibernateDialect#getMaxPrecision(int)
*/
public int getMaxPrecision(int dataType) {
if (dataType == Types.DECIMAL || dataType == Types.NUMERIC) {
return 32;
}
if (dataType == Types.DOUBLE || dataType == Types.DOUBLE) {
return 16;
}
return 32;
}
/*
* (non-Javadoc)
*
* @see net.sourceforge.squirrel_sql.plugins.dbcopy.dialects.HibernateDialect#getMaxScale(int)
*/
public int getMaxScale(int dataType) {
return getMaxPrecision(dataType);
}
/*
* (non-Javadoc)
*
* @see net.sourceforge.squirrel_sql.plugins.dbcopy.dialects.HibernateDialect#getPrecisionDigits(int,
* int)
*/
public int getPrecisionDigits(int columnSize, int dataType) {
return columnSize;
}
/*
* (non-Javadoc)
*
* @see net.sourceforge.squirrel_sql.plugins.dbcopy.dialects.HibernateDialect#getColumnLength(int,
* int)
*/
public int getColumnLength(int columnSize, int dataType) {
return columnSize;
}
/**
* The string which identifies this dialect in the dialect chooser.
*
* @return a descriptive name that tells the user what database this dialect
* is design to work with.
*/
public String getDisplayName() {
return "Informix";
}
/**
* Returns boolean value indicating whether or not this dialect supports the
* specified database product/version.
*
* @param databaseProductName
* the name of the database as reported by
* DatabaseMetaData.getDatabaseProductName()
* @param databaseProductVersion
* the version of the database as reported by
* DatabaseMetaData.getDatabaseProductVersion()
* @return true if this dialect can be used for the specified product name
* and version; false otherwise.
*/
public boolean supportsProduct(String databaseProductName,
String databaseProductVersion) {
if (databaseProductName == null) {
return false;
}
if (databaseProductName.trim().startsWith("Informix")) {
// We don't yet have the need to discriminate by version.
return true;
}
return false;
}
/**
* Returns the SQL statement to use to add a column to the specified table
* using the information about the new column specified by info.
*
* @param info
* information about the new column such as type, name, etc.
*
* @return
* @throws UnsupportedOperationException
* if the database doesn't support adding columns after a table
* has already been created.
*/
public String[] getColumnAddSQL(TableColumnInfo info)
throws UnsupportedOperationException {
return new String[] { DialectUtils.getColumnAddSQL(info, this, true,
false) };
}
/**
* Returns a boolean value indicating whether or not this database dialect
* supports dropping columns from tables.
*
* @return true if the database supports dropping columns; false otherwise.
*/
public boolean supportsDropColumn() {
return true;
}
/**
* Returns the SQL that forms the command to drop the specified colum in the
* specified table.
*
* @param tableName
* the name of the table that has the column
* @param columnName
* the name of the column to drop.
* @return
* @throws UnsupportedOperationException
* if the database doesn't support dropping columns.
*/
public String getColumnDropSQL(String tableName, String columnName) {
return DialectUtils.getColumnDropSQL(tableName, columnName);
}
/**
* Returns the SQL that forms the command to drop the specified table. If
* cascade contraints is supported by the dialect and cascadeConstraints is
* true, then a drop statement with cascade constraints clause will be
* formed.
*
* @param tableName
* the table to drop
* @param cascadeConstraints
* whether or not to drop any FKs that may reference the
* specified table.
*
* @return the drop SQL command.
*/
public String getTableDropSQL(String tableName, boolean cascadeConstraints) {
return DialectUtils
.getTableDropSQL(tableName, true, cascadeConstraints);
}
/**
* Returns the SQL that forms the command to add a primary key to the
* specified table composed of the given column names.
*
* CREATE UNIQUE INDEX test_index ON test_table (test_field);
*
* ALTER TABLE test_table ADD CONSTRAINT PRIMARY KEY (test_field) CONSTRAINT
* test_constraint;
*
* alter table table_name add constraint primary key (column_names)
* constraint pkName
*
* @param pkName
* the name of the constraint
* @param columnNames
* the columns that form the key
* @return
*/
public String[] getAddPrimaryKeySQL(String pkName, TableColumnInfo[] columns) {
return new String[] {
DialectUtils.getAddIndexSQL(pkName, true, columns),
DialectUtils.getAddPrimaryKeySQL(pkName, columns, true)
};
}
/**
* Returns a boolean value indicating whether or not this dialect supports
* adding comments to columns.
*
* @return true if column comments are supported; false otherwise.
*/
public boolean supportsColumnComment() {
return false;
}
/**
* Returns the SQL statement to use to add a comment to the specified column
* of the specified table.
*
* @param info
* information about the column such as type, name, etc.
* @return
* @throws UnsupportedOperationException
* if the database doesn't support annotating columns with a
* comment.
*/
public String getColumnCommentAlterSQL(TableColumnInfo info)
throws UnsupportedOperationException
{
int featureId = DialectUtils.COLUMN_COMMENT_TYPE;
String msg = DialectUtils.getUnsupportedMessage(this, featureId);
throw new UnsupportedOperationException(msg);
}
/**
* Returns a boolean value indicating whether or not this database dialect
* supports changing a column from null to not-null and vice versa.
*
* @return true if the database supports dropping columns; false otherwise.
*/
public boolean supportsAlterColumnNull() {
return true;
}
/**
* Returns the SQL used to alter the specified column to not allow null
* values
*
* @param info
* the column to modify
* @return the SQL to execute
*/
public String getColumnNullableAlterSQL(TableColumnInfo info) {
String alterClause = DialectUtils.MODIFY_CLAUSE;
return DialectUtils.getColumnNullableAlterSQL(info,
this,
alterClause,
true);
}
/**
* Returns a boolean value indicating whether or not this database dialect
* supports renaming columns.
*
* @return true if the database supports changing the name of columns; false
* otherwise.
*/
public boolean supportsRenameColumn() {
return true;
}
/**
* Returns the SQL that is used to change the column name.
*
*
* @param from
* the TableColumnInfo as it is
* @param to
* the TableColumnInfo as it wants to be
*
* @return the SQL to make the change
*/
public String getColumnNameAlterSQL(TableColumnInfo from, TableColumnInfo to) {
return DialectUtils.getColumnRenameSQL(from, to);
}
/**
* Returns the SQL that is used to change the column type.
*
* alter table table_name modify column_name datatype
*
* @param from
* the TableColumnInfo as it is
* @param to
* the TableColumnInfo as it wants to be
*
* @return the SQL to make the change
* @throw UnsupportedOperationException if the database doesn't support
* modifying column types.
*/
public String getColumnTypeAlterSQL(TableColumnInfo from, TableColumnInfo to)
throws UnsupportedOperationException {
String alterClause = DialectUtils.MODIFY_CLAUSE;
String setClause = null;
return DialectUtils.getColumnTypeAlterSQL(this,
alterClause,
setClause,
false,
from,
to);
}
/**
* Returns a boolean value indicating whether or not this database dialect
* supports changing a column's default value.
*
* @return true if the database supports modifying column defaults; false
* otherwise
*/
public boolean supportsAlterColumnDefault() {
return true;
}
/**
* Returns the SQL command to change the specified column's default value
*
* @param info
* the column to modify and it's default value.
* @return SQL to make the change
*/
public String getColumnDefaultAlterSQL(TableColumnInfo info) {
String alterClause = DialectUtils.MODIFY_CLAUSE;
String defaultClause = DialectUtils.DEFAULT_CLAUSE;
return DialectUtils.getColumnDefaultAlterSQL(this,
info,
alterClause,
true,
defaultClause);
}
/**
* Returns the SQL command to drop the specified table's primary key.
*
* @param pkName
* the name of the primary key that should be dropped
* @param tableName
* the name of the table whose primary key should be dropped
* @return
*/
public String getDropPrimaryKeySQL(String pkName, String tableName) {
return DialectUtils.getDropPrimaryKeySQL(pkName, tableName, true);
}
}
| true | true | public InformixDialect() {
super();
registerColumnType(Types.BIGINT, "integer");
registerColumnType(Types.BINARY, "byte");
registerColumnType(Types.BIT, "byte");
registerColumnType(Types.BLOB, "byte");
registerColumnType(Types.BOOLEAN, "smallint");
registerColumnType(Types.CHAR, 32511, "char($l)");
registerColumnType(Types.CHAR, "char(32511)");
registerColumnType(Types.CLOB, "text");
registerColumnType(Types.DATE, "date");
registerColumnType(Types.DECIMAL, "decimal($p,$s)");
registerColumnType(Types.DOUBLE, 15, "float($l)");
registerColumnType(Types.DOUBLE, "float(15)");
registerColumnType(Types.FLOAT, 15, "float($l)");
registerColumnType(Types.FLOAT, "float(15)");
registerColumnType(Types.INTEGER, "integer");
registerColumnType(Types.LONGVARBINARY, "byte");
registerColumnType(Types.LONGVARCHAR, "text");
registerColumnType(Types.NUMERIC, "numeric($p,$s)");
registerColumnType(Types.REAL, "real");
registerColumnType(Types.SMALLINT, "smallint");
registerColumnType(Types.TIME, "datetime hour to second");
registerColumnType(Types.TIMESTAMP, "datetime");
registerColumnType(Types.TINYINT, "smallint");
registerColumnType(Types.VARBINARY, "byte");
registerColumnType(Types.VARCHAR, 255, "varchar($l)");
registerColumnType(Types.VARCHAR, "text");
}
| public InformixDialect() {
super();
registerColumnType(Types.BIGINT, "integer");
registerColumnType(Types.BINARY, "byte");
registerColumnType(Types.BIT, "byte");
registerColumnType(Types.BLOB, "byte");
registerColumnType(Types.BOOLEAN, "smallint");
registerColumnType(Types.CHAR, 32511, "char($l)");
registerColumnType(Types.CHAR, "char(32511)");
registerColumnType(Types.CLOB, "text");
registerColumnType(Types.DATE, "date");
registerColumnType(Types.DECIMAL, "decimal($p,$s)");
registerColumnType(Types.DOUBLE, 15, "float($l)");
registerColumnType(Types.DOUBLE, "float(15)");
registerColumnType(Types.FLOAT, 15, "float($l)");
registerColumnType(Types.FLOAT, "float(15)");
registerColumnType(Types.INTEGER, "integer");
registerColumnType(Types.LONGVARBINARY, "byte");
registerColumnType(Types.LONGVARCHAR, "text");
registerColumnType(Types.NUMERIC, "numeric($p,$s)");
registerColumnType(Types.REAL, "real");
registerColumnType(Types.SMALLINT, "smallint");
registerColumnType(Types.TIME, "datetime hour to second");
registerColumnType(Types.TIMESTAMP, "datetime year to fraction");
registerColumnType(Types.TINYINT, "smallint");
registerColumnType(Types.VARBINARY, "byte");
registerColumnType(Types.VARCHAR, 255, "varchar($l)");
registerColumnType(Types.VARCHAR, "text");
}
|
diff --git a/stilts-activity-ui/src/main/java/org/purl/wf4ever/astrotaverna/tjoin/ui/serviceprovider/StiltsServiceProvider.java b/stilts-activity-ui/src/main/java/org/purl/wf4ever/astrotaverna/tjoin/ui/serviceprovider/StiltsServiceProvider.java
index d277f79..31fa251 100644
--- a/stilts-activity-ui/src/main/java/org/purl/wf4ever/astrotaverna/tjoin/ui/serviceprovider/StiltsServiceProvider.java
+++ b/stilts-activity-ui/src/main/java/org/purl/wf4ever/astrotaverna/tjoin/ui/serviceprovider/StiltsServiceProvider.java
@@ -1,187 +1,187 @@
package org.purl.wf4ever.astrotaverna.tjoin.ui.serviceprovider;
import java.net.URI;
import java.util.ArrayList;
import java.util.List;
import javax.swing.Icon;
import org.purl.wf4ever.astrotaverna.tjoin.ui.serviceprovider.AddColumnByExpressionServiceDesc;
import org.purl.wf4ever.astrotaverna.tjoin.ui.serviceprovider.AddSkyCoordsServiceDesc;
import org.purl.wf4ever.astrotaverna.tjoin.ui.serviceprovider.CheckTemplateFillerServiceDesc;
import org.purl.wf4ever.astrotaverna.tjoin.ui.serviceprovider.CoordTransformationServiceDesc;
import org.purl.wf4ever.astrotaverna.tjoin.ui.serviceprovider.FormatConversionServiceDesc;
import org.purl.wf4ever.astrotaverna.tjoin.ui.serviceprovider.GetListFromColumnServiceDesc;
import org.purl.wf4ever.astrotaverna.tjoin.ui.serviceprovider.ResolveCoordsServiceDesc;
import org.purl.wf4ever.astrotaverna.tjoin.ui.serviceprovider.SelectColumnsServiceDesc;
import org.purl.wf4ever.astrotaverna.tjoin.ui.serviceprovider.SelectRowsServiceDesc;
import org.purl.wf4ever.astrotaverna.tjoin.ui.serviceprovider.StiltsServiceDesc;
import org.purl.wf4ever.astrotaverna.tjoin.ui.serviceprovider.StiltsServiceIcon;
import org.purl.wf4ever.astrotaverna.tjoin.ui.serviceprovider.TcatListServiceDesc;
import org.purl.wf4ever.astrotaverna.tjoin.ui.serviceprovider.TcatServiceDesc;
import org.purl.wf4ever.astrotaverna.tjoin.ui.serviceprovider.TemplateFillerServiceDesc;
import net.sf.taverna.t2.servicedescriptions.ServiceDescription;
import net.sf.taverna.t2.servicedescriptions.ServiceDescriptionProvider;
public class StiltsServiceProvider implements ServiceDescriptionProvider {
//OJO!!!!!!!!!!!!!!!!!!!!
//write down a real URI
private static final URI providerId = URI
.create("http://www.iaa.es/service-provider/tjoin");
/**
* Do the actual search for services. Return using the callBack parameter.
*/
@SuppressWarnings("unchecked")
public void findServiceDescriptionsAsync(
FindServiceDescriptionsCallBack callBack) {
// Use callback.status() for long-running searches
// callBack.status("Resolving example services");
List<ServiceDescription> results = new ArrayList<ServiceDescription>();
// FIXME: Implement the actual service search/lookup instead
// of dummy for-loop
//for (int i = 1; i <= 5; i++) {
// StiltsServiceDesc service = new StiltsServiceDesc();
// // Populate the service description bean
// service.setExampleString("Example " + i);
// service.setExampleUri(URI.create("http://localhost:8192/service"));
// // Optional: set description
// service.setDescription("Service example number " + i);
// results.add(service);
//}
StiltsServiceDesc service = new StiltsServiceDesc();
service.setTypeOFInput("String");
service.setDescription("Join two VOTables");
results.add(service);
//ServiceDescription
SelectColumnsServiceDesc service2 = new SelectColumnsServiceDesc();
service2.setTypeOfInput("String");
service2.setTypeOfFilter("Column names");
service2.setDescription("Select columns from a VOTable");
results.add(service2);
SelectRowsServiceDesc service3 = new SelectRowsServiceDesc();
service3.setTypeOfInput("String");
//service3.setTypeOfFilter("Column names");
service3.setDescription("Select rows from a VOTable");
results.add(service3);
CoordTransformationServiceDesc service4 = new CoordTransformationServiceDesc();
service4.setTypeOfInput("String");
- service4.setDescription("Add Coordinates units conversion");
+ service4.setDescription("Add Coordinate units conversion");
results.add(service4);
FormatConversionServiceDesc service5 = new FormatConversionServiceDesc();
service5.setTypeOfInput("String");
//service3.setTypeOfFilter("Column names");
service5.setDescription("VOTable format conversion");
results.add(service5);
AddColumnByExpressionServiceDesc service6 = new AddColumnByExpressionServiceDesc();
service6.setTypeOfInput("String");
//service3.setTypeOfFilter("Column names");
service6.setDescription("Add column to VOTable");
results.add(service6);
AddSkyCoordsServiceDesc service7 = new AddSkyCoordsServiceDesc();
service7.setTypeOfInput("String");
service7.setDescription("Coordinates reference system transformation");
results.add(service7);
ResolveCoordsServiceDesc service8 = new ResolveCoordsServiceDesc();
service8.setTypeOfInput("String");
service8.setDescription("Resolve object name into coordinates");
results.add(service8);
TcatServiceDesc service9 = new TcatServiceDesc();
service9.setTypeOfInput("String");
service9.setDescription("Concatenate two VOTables");
results.add(service9);
TcatListServiceDesc service10 = new TcatListServiceDesc();
service10.setTypeOfInput("String");
service10.setDescription("Concatenate a list of VOTables");
results.add(service10);
GetListFromColumnServiceDesc service11 = new GetListFromColumnServiceDesc();
service11.setTypeOfInput("String");
service11.setDescription("Extract column from a VOTable as a list");
results.add(service11);
TemplateFillerServiceDesc service12 = new TemplateFillerServiceDesc();
service12.setTypeOfInput("String");
service12.setDescription("Fill template file from VOTable");
results.add(service12);
CheckTemplateFillerServiceDesc service13 = new CheckTemplateFillerServiceDesc();
service13.setTypeOfInput("String");
service13.setDescription("Validate template file against VOTable");
results.add(service13);
AddCommonRowToVOTableServiceDesc service14 = new AddCommonRowToVOTableServiceDesc();
service14.setTypeOfInput("String");
service14.setCommonRowPosition("Left");
service14.setDescription("Add common fields to a VOTable");
results.add(service14);
CrossMatch2ServiceDesc service15 = new CrossMatch2ServiceDesc();
service15.setTypeOfInput("String");
service15.setDescription("Crossmatch two VOTables");
results.add(service15);
//change done in wf4ever
//Put here additional descriptions for other services
//............
//............
//............
//............
// partialResults() can also be called several times from inside
// for-loop if the full search takes a long time
callBack.partialResults(results);
// No more results will be coming
callBack.finished();
}
/**
* Icon for service provider
*/
public Icon getIcon() {
return StiltsServiceIcon.getIcon();
}
/**
* Name of service provider, appears in right click for 'Remove service
* provider'
*/
public String getName() {
return "My astro services";
}
@Override
public String toString() {
return getName();
}
public String getId() {
return providerId.toASCIIString();
}
}
| true | true | public void findServiceDescriptionsAsync(
FindServiceDescriptionsCallBack callBack) {
// Use callback.status() for long-running searches
// callBack.status("Resolving example services");
List<ServiceDescription> results = new ArrayList<ServiceDescription>();
// FIXME: Implement the actual service search/lookup instead
// of dummy for-loop
//for (int i = 1; i <= 5; i++) {
// StiltsServiceDesc service = new StiltsServiceDesc();
// // Populate the service description bean
// service.setExampleString("Example " + i);
// service.setExampleUri(URI.create("http://localhost:8192/service"));
// // Optional: set description
// service.setDescription("Service example number " + i);
// results.add(service);
//}
StiltsServiceDesc service = new StiltsServiceDesc();
service.setTypeOFInput("String");
service.setDescription("Join two VOTables");
results.add(service);
//ServiceDescription
SelectColumnsServiceDesc service2 = new SelectColumnsServiceDesc();
service2.setTypeOfInput("String");
service2.setTypeOfFilter("Column names");
service2.setDescription("Select columns from a VOTable");
results.add(service2);
SelectRowsServiceDesc service3 = new SelectRowsServiceDesc();
service3.setTypeOfInput("String");
//service3.setTypeOfFilter("Column names");
service3.setDescription("Select rows from a VOTable");
results.add(service3);
CoordTransformationServiceDesc service4 = new CoordTransformationServiceDesc();
service4.setTypeOfInput("String");
service4.setDescription("Add Coordinates units conversion");
results.add(service4);
FormatConversionServiceDesc service5 = new FormatConversionServiceDesc();
service5.setTypeOfInput("String");
//service3.setTypeOfFilter("Column names");
service5.setDescription("VOTable format conversion");
results.add(service5);
AddColumnByExpressionServiceDesc service6 = new AddColumnByExpressionServiceDesc();
service6.setTypeOfInput("String");
//service3.setTypeOfFilter("Column names");
service6.setDescription("Add column to VOTable");
results.add(service6);
AddSkyCoordsServiceDesc service7 = new AddSkyCoordsServiceDesc();
service7.setTypeOfInput("String");
service7.setDescription("Coordinates reference system transformation");
results.add(service7);
ResolveCoordsServiceDesc service8 = new ResolveCoordsServiceDesc();
service8.setTypeOfInput("String");
service8.setDescription("Resolve object name into coordinates");
results.add(service8);
TcatServiceDesc service9 = new TcatServiceDesc();
service9.setTypeOfInput("String");
service9.setDescription("Concatenate two VOTables");
results.add(service9);
TcatListServiceDesc service10 = new TcatListServiceDesc();
service10.setTypeOfInput("String");
service10.setDescription("Concatenate a list of VOTables");
results.add(service10);
GetListFromColumnServiceDesc service11 = new GetListFromColumnServiceDesc();
service11.setTypeOfInput("String");
service11.setDescription("Extract column from a VOTable as a list");
results.add(service11);
TemplateFillerServiceDesc service12 = new TemplateFillerServiceDesc();
service12.setTypeOfInput("String");
service12.setDescription("Fill template file from VOTable");
results.add(service12);
CheckTemplateFillerServiceDesc service13 = new CheckTemplateFillerServiceDesc();
service13.setTypeOfInput("String");
service13.setDescription("Validate template file against VOTable");
results.add(service13);
AddCommonRowToVOTableServiceDesc service14 = new AddCommonRowToVOTableServiceDesc();
service14.setTypeOfInput("String");
service14.setCommonRowPosition("Left");
service14.setDescription("Add common fields to a VOTable");
results.add(service14);
CrossMatch2ServiceDesc service15 = new CrossMatch2ServiceDesc();
service15.setTypeOfInput("String");
service15.setDescription("Crossmatch two VOTables");
results.add(service15);
//change done in wf4ever
//Put here additional descriptions for other services
//............
//............
//............
//............
// partialResults() can also be called several times from inside
// for-loop if the full search takes a long time
callBack.partialResults(results);
// No more results will be coming
callBack.finished();
}
| public void findServiceDescriptionsAsync(
FindServiceDescriptionsCallBack callBack) {
// Use callback.status() for long-running searches
// callBack.status("Resolving example services");
List<ServiceDescription> results = new ArrayList<ServiceDescription>();
// FIXME: Implement the actual service search/lookup instead
// of dummy for-loop
//for (int i = 1; i <= 5; i++) {
// StiltsServiceDesc service = new StiltsServiceDesc();
// // Populate the service description bean
// service.setExampleString("Example " + i);
// service.setExampleUri(URI.create("http://localhost:8192/service"));
// // Optional: set description
// service.setDescription("Service example number " + i);
// results.add(service);
//}
StiltsServiceDesc service = new StiltsServiceDesc();
service.setTypeOFInput("String");
service.setDescription("Join two VOTables");
results.add(service);
//ServiceDescription
SelectColumnsServiceDesc service2 = new SelectColumnsServiceDesc();
service2.setTypeOfInput("String");
service2.setTypeOfFilter("Column names");
service2.setDescription("Select columns from a VOTable");
results.add(service2);
SelectRowsServiceDesc service3 = new SelectRowsServiceDesc();
service3.setTypeOfInput("String");
//service3.setTypeOfFilter("Column names");
service3.setDescription("Select rows from a VOTable");
results.add(service3);
CoordTransformationServiceDesc service4 = new CoordTransformationServiceDesc();
service4.setTypeOfInput("String");
service4.setDescription("Add Coordinate units conversion");
results.add(service4);
FormatConversionServiceDesc service5 = new FormatConversionServiceDesc();
service5.setTypeOfInput("String");
//service3.setTypeOfFilter("Column names");
service5.setDescription("VOTable format conversion");
results.add(service5);
AddColumnByExpressionServiceDesc service6 = new AddColumnByExpressionServiceDesc();
service6.setTypeOfInput("String");
//service3.setTypeOfFilter("Column names");
service6.setDescription("Add column to VOTable");
results.add(service6);
AddSkyCoordsServiceDesc service7 = new AddSkyCoordsServiceDesc();
service7.setTypeOfInput("String");
service7.setDescription("Coordinates reference system transformation");
results.add(service7);
ResolveCoordsServiceDesc service8 = new ResolveCoordsServiceDesc();
service8.setTypeOfInput("String");
service8.setDescription("Resolve object name into coordinates");
results.add(service8);
TcatServiceDesc service9 = new TcatServiceDesc();
service9.setTypeOfInput("String");
service9.setDescription("Concatenate two VOTables");
results.add(service9);
TcatListServiceDesc service10 = new TcatListServiceDesc();
service10.setTypeOfInput("String");
service10.setDescription("Concatenate a list of VOTables");
results.add(service10);
GetListFromColumnServiceDesc service11 = new GetListFromColumnServiceDesc();
service11.setTypeOfInput("String");
service11.setDescription("Extract column from a VOTable as a list");
results.add(service11);
TemplateFillerServiceDesc service12 = new TemplateFillerServiceDesc();
service12.setTypeOfInput("String");
service12.setDescription("Fill template file from VOTable");
results.add(service12);
CheckTemplateFillerServiceDesc service13 = new CheckTemplateFillerServiceDesc();
service13.setTypeOfInput("String");
service13.setDescription("Validate template file against VOTable");
results.add(service13);
AddCommonRowToVOTableServiceDesc service14 = new AddCommonRowToVOTableServiceDesc();
service14.setTypeOfInput("String");
service14.setCommonRowPosition("Left");
service14.setDescription("Add common fields to a VOTable");
results.add(service14);
CrossMatch2ServiceDesc service15 = new CrossMatch2ServiceDesc();
service15.setTypeOfInput("String");
service15.setDescription("Crossmatch two VOTables");
results.add(service15);
//change done in wf4ever
//Put here additional descriptions for other services
//............
//............
//............
//............
// partialResults() can also be called several times from inside
// for-loop if the full search takes a long time
callBack.partialResults(results);
// No more results will be coming
callBack.finished();
}
|
diff --git a/src/examples/simple/src/main/java/org/apache/accumulo/examples/simple/isolation/InterferenceTest.java b/src/examples/simple/src/main/java/org/apache/accumulo/examples/simple/isolation/InterferenceTest.java
index b9bee2133..e569b27eb 100644
--- a/src/examples/simple/src/main/java/org/apache/accumulo/examples/simple/isolation/InterferenceTest.java
+++ b/src/examples/simple/src/main/java/org/apache/accumulo/examples/simple/isolation/InterferenceTest.java
@@ -1,176 +1,176 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.accumulo.examples.simple.isolation;
import java.util.HashSet;
import java.util.Map.Entry;
import org.apache.accumulo.core.Constants;
import org.apache.accumulo.core.client.BatchWriter;
import org.apache.accumulo.core.client.Connector;
import org.apache.accumulo.core.client.IsolatedScanner;
import org.apache.accumulo.core.client.MutationsRejectedException;
import org.apache.accumulo.core.client.Scanner;
import org.apache.accumulo.core.client.ZooKeeperInstance;
import org.apache.accumulo.core.data.ByteSequence;
import org.apache.accumulo.core.data.Key;
import org.apache.accumulo.core.data.Mutation;
import org.apache.accumulo.core.data.Value;
import org.apache.hadoop.io.Text;
import org.apache.log4j.Logger;
/**
* This example shows how a concurrent reader and writer can interfere with each other. It creates two threads that run forever reading and writing to the same
* table.
*
* When the example is run with isolation enabled, no interference will be observed.
*
* When the example is run with out isolation, the reader will see partial mutations of a row.
*
*/
public class InterferenceTest {
private static final int NUM_ROWS = 500;
private static final int NUM_COLUMNS = 113; // scanner batches 1000 by default, so make num columns not a multiple of 10
private static long iterations;
private static final Logger log = Logger.getLogger(InterferenceTest.class);
static class Writer implements Runnable {
private BatchWriter bw;
Writer(BatchWriter bw) {
this.bw = bw;
}
@Override
public void run() {
int row = 0;
int value = 0;
for (long i = 0; i < iterations; i++) {
Mutation m = new Mutation(new Text(String.format("%03d", row)));
row = (row + 1) % NUM_ROWS;
for (int cq = 0; cq < NUM_COLUMNS; cq++)
m.put(new Text("000"), new Text(String.format("%04d", cq)), new Value(("" + value).getBytes()));
value++;
try {
bw.addMutation(m);
} catch (MutationsRejectedException e) {
e.printStackTrace();
System.exit(-1);
}
}
try {
bw.close();
} catch (MutationsRejectedException e) {
log.error(e, e);
}
}
}
static class Reader implements Runnable {
private Scanner scanner;
volatile boolean stop = false;
Reader(Scanner scanner) {
this.scanner = scanner;
}
@Override
public void run() {
- while (stop) {
+ while (!stop) {
ByteSequence row = null;
int count = 0;
// all columns in a row should have the same value,
// use this hash set to track that
HashSet<String> values = new HashSet<String>();
for (Entry<Key,Value> entry : scanner) {
if (row == null)
row = entry.getKey().getRowData();
if (!row.equals(entry.getKey().getRowData())) {
if (count != NUM_COLUMNS)
System.err.println("ERROR Did not see " + NUM_COLUMNS + " columns in row " + row);
if (values.size() > 1)
System.err.println("ERROR Columns in row " + row + " had multiple values " + values);
row = entry.getKey().getRowData();
count = 0;
values.clear();
}
count++;
values.add(entry.getValue().toString());
}
if (count > 0 && count != NUM_COLUMNS)
System.err.println("ERROR Did not see " + NUM_COLUMNS + " columns in row " + row);
if (values.size() > 1)
System.err.println("ERROR Columns in row " + row + " had multiple values " + values);
}
}
public void stopNow() {
stop = true;
}
}
public static void main(String[] args) throws Exception {
if (args.length != 7) {
System.out.println("Usage : " + InterferenceTest.class.getName() + " <instance name> <zookeepers> <user> <password> <table> <iterations> true|false");
System.out.println(" The last argument determines if scans should be isolated. When false, expect to see errors");
return;
}
ZooKeeperInstance zki = new ZooKeeperInstance(args[0], args[1]);
Connector conn = zki.getConnector(args[2], args[3].getBytes());
String table = args[4];
iterations = Long.parseLong(args[5]);
if (iterations < 1)
iterations = Long.MAX_VALUE;
if (!conn.tableOperations().exists(table))
conn.tableOperations().create(table);
Thread writer = new Thread(new Writer(conn.createBatchWriter(table, 10000000, 60000l, 3)));
writer.start();
Reader r;
if (Boolean.parseBoolean(args[6]))
r = new Reader(new IsolatedScanner(conn.createScanner(table, Constants.NO_AUTHS)));
else
r = new Reader(conn.createScanner(table, Constants.NO_AUTHS));
Thread reader;
reader = new Thread(r);
reader.start();
writer.join();
r.stopNow();
reader.join();
System.out.println("finished");
}
}
| true | true | public void run() {
while (stop) {
ByteSequence row = null;
int count = 0;
// all columns in a row should have the same value,
// use this hash set to track that
HashSet<String> values = new HashSet<String>();
for (Entry<Key,Value> entry : scanner) {
if (row == null)
row = entry.getKey().getRowData();
if (!row.equals(entry.getKey().getRowData())) {
if (count != NUM_COLUMNS)
System.err.println("ERROR Did not see " + NUM_COLUMNS + " columns in row " + row);
if (values.size() > 1)
System.err.println("ERROR Columns in row " + row + " had multiple values " + values);
row = entry.getKey().getRowData();
count = 0;
values.clear();
}
count++;
values.add(entry.getValue().toString());
}
if (count > 0 && count != NUM_COLUMNS)
System.err.println("ERROR Did not see " + NUM_COLUMNS + " columns in row " + row);
if (values.size() > 1)
System.err.println("ERROR Columns in row " + row + " had multiple values " + values);
}
}
| public void run() {
while (!stop) {
ByteSequence row = null;
int count = 0;
// all columns in a row should have the same value,
// use this hash set to track that
HashSet<String> values = new HashSet<String>();
for (Entry<Key,Value> entry : scanner) {
if (row == null)
row = entry.getKey().getRowData();
if (!row.equals(entry.getKey().getRowData())) {
if (count != NUM_COLUMNS)
System.err.println("ERROR Did not see " + NUM_COLUMNS + " columns in row " + row);
if (values.size() > 1)
System.err.println("ERROR Columns in row " + row + " had multiple values " + values);
row = entry.getKey().getRowData();
count = 0;
values.clear();
}
count++;
values.add(entry.getValue().toString());
}
if (count > 0 && count != NUM_COLUMNS)
System.err.println("ERROR Did not see " + NUM_COLUMNS + " columns in row " + row);
if (values.size() > 1)
System.err.println("ERROR Columns in row " + row + " had multiple values " + values);
}
}
|
diff --git a/src/main/java/lcmc/gui/resources/BlockDevInfo.java b/src/main/java/lcmc/gui/resources/BlockDevInfo.java
index 896994c0..e9a44e78 100644
--- a/src/main/java/lcmc/gui/resources/BlockDevInfo.java
+++ b/src/main/java/lcmc/gui/resources/BlockDevInfo.java
@@ -1,2680 +1,2686 @@
/*
* This file is part of DRBD Management Console by LINBIT HA-Solutions GmbH
* written by Rasto Levrinc.
*
* Copyright (C) 2009-2010, LINBIT HA-Solutions GmbH.
* Copyright (C) 2009-2010, Rasto Levrinc
*
* DRBD Management Console is free software; you can redistribute it and/or
* modify it under the terms of the GNU General Public License as published
* by the Free Software Foundation; either version 2, or (at your option)
* any later version.
*
* DRBD Management Console is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with drbd; see the file COPYING. If not, write to
* the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA.
*/
package lcmc.gui.resources;
import lcmc.Exceptions;
import lcmc.gui.Browser;
import lcmc.gui.HostBrowser;
import lcmc.gui.ClusterBrowser;
import lcmc.gui.DrbdGraph;
import lcmc.gui.dialog.lvm.PVCreate;
import lcmc.gui.dialog.lvm.PVRemove;
import lcmc.gui.dialog.lvm.VGCreate;
import lcmc.gui.dialog.lvm.VGRemove;
import lcmc.gui.dialog.lvm.LVCreate;
import lcmc.gui.dialog.lvm.LVResize;
import lcmc.gui.dialog.lvm.LVSnapshot;
import lcmc.gui.dialog.drbd.DrbdLog;
import lcmc.data.ConfigData;
import lcmc.utilities.MyMenu;
import lcmc.utilities.MyMenuItem;
import lcmc.utilities.UpdatableItem;
import lcmc.utilities.Tools;
import lcmc.utilities.DRBD;
import lcmc.utilities.LVM;
import lcmc.utilities.ButtonCallback;
import lcmc.gui.widget.Widget;
import lcmc.data.Host;
import lcmc.data.Subtext;
import lcmc.data.Cluster;
import lcmc.data.DRBDtestData;
import lcmc.data.resources.BlockDevice;
import lcmc.data.AccessMode;
import lcmc.data.DrbdXML;
import java.awt.Dimension;
import java.awt.Component;
import java.awt.Font;
import java.awt.Color;
import java.awt.event.ActionListener;
import java.awt.event.ActionEvent;
import java.awt.BorderLayout;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.ArrayList;
import java.util.Enumeration;
import java.util.Map;
import java.util.Set;
import java.util.HashMap;
import java.util.regex.Pattern;
import java.util.regex.Matcher;
import java.util.concurrent.CountDownLatch;
import javax.swing.ImageIcon;
import javax.swing.JPanel;
import javax.swing.JComponent;
import javax.swing.SwingUtilities;
import javax.swing.tree.DefaultMutableTreeNode;
import javax.swing.BoxLayout;
import javax.swing.JScrollPane;
/**
* This class holds info data for a block device.
*/
public final class BlockDevInfo extends EditableInfo {
/** DRBD resource in which this block device is member. */
private DrbdVolumeInfo drbdVolumeInfo;
/** Map from paremeters to the fact if the last entered value was
* correct. */
private final Map<String, Boolean> paramCorrectValueMap =
new HashMap<String, Boolean>();
/** Cache for the info panel. */
private JComponent infoPanel = null;
/** Keyword that denotes flexible meta-disk. */
private static final String DRBD_MD_TYPE_FLEXIBLE = "Flexible";
/** Internal parameter name of drbd meta-disk. */
private static final String DRBD_MD_PARAM = "DrbdMetaDisk";
/** Internal parameter name of drbd meta-disk index. */
private static final String DRBD_MD_INDEX_PARAM = "DrbdMetaDiskIndex";
/** Large harddisk icon. */
public static final ImageIcon HARDDISK_ICON_LARGE = Tools.createImageIcon(
Tools.getDefault("BlockDevInfo.HarddiskIconLarge"));
/** Large harddisk with drbd icon. */
public static final ImageIcon HARDDISK_DRBD_ICON_LARGE =
Tools.createImageIcon(
Tools.getDefault("BlockDevInfo.HarddiskDRBDIconLarge"));
/** Large no harddisk icon. */
public static final ImageIcon NO_HARDDISK_ICON_LARGE =
Tools.createImageIcon(
Tools.getDefault("BlockDevInfo.NoHarddiskIconLarge"));
/** Harddisk icon. */
public static final ImageIcon HARDDISK_ICON = Tools.createImageIcon(
Tools.getDefault("BlockDevInfo.HarddiskIcon"));
    /** Subtext shown in the graph for a DRBD meta-disk. */
    private static final Subtext METADISK_SUBTEXT =
        new Subtext("meta-disk", Color.BLUE, Color.BLACK);
    /** Subtext shown in the graph for a swap device. */
    private static final Subtext SWAP_SUBTEXT =
        new Subtext("swap", Color.BLUE, Color.BLACK);
    /** Subtext shown in the graph for a mounted device. */
    private static final Subtext MOUNTED_SUBTEXT =
        new Subtext("mounted", Color.BLUE, Color.BLACK);
    /** Subtext shown in the graph for an LVM physical volume. */
    private static final Subtext PHYSICAL_VOLUME_SUBTEXT =
        new Subtext("PV", Color.BLUE, Color.GREEN);
    /** Maximum length of the string shown in the right corner of the
        graph; longer strings are cut to this length. */
    private static final int MAX_RIGHT_CORNER_STRING_LENGTH = 28;
    /** String that is displayed as a tool tip for disabled menu item. */
    static final String NO_DRBD_RESOURCE_STRING =
                                "it is not a drbd resource";
    /** Allow two primaries parameter. */
    private static final String ALLOW_TWO_PRIMARIES = "allow-two-primaries";
    /** Name of the create PV menu item. */
    private static final String PV_CREATE_MENU_ITEM = "Create PV";
    /** Description of the create PV menu item. */
    private static final String PV_CREATE_MENU_DESCRIPTION =
                    "Initialize a disk or partition for use by LVM.";
    /** Name of the remove PV menu item. */
    private static final String PV_REMOVE_MENU_ITEM = "Remove PV";
    /** Description of the remove PV menu item. */
    private static final String PV_REMOVE_MENU_DESCRIPTION =
                                            "Remove a physical volume.";
    /** Name of the create VG menu item. */
    private static final String VG_CREATE_MENU_ITEM = "Create VG";
    /** Description of the create VG menu item. */
    private static final String VG_CREATE_MENU_DESCRIPTION =
                                                "Create a volume group.";
    /** Name of the remove VG menu item. */
    private static final String VG_REMOVE_MENU_ITEM = "Remove VG";
    /** Description of the remove VG menu item. */
    private static final String VG_REMOVE_MENU_DESCRIPTION =
                                                "Remove a volume group.";
    /** Name of the create LV menu item; the volume group name is
        appended at display time. */
    private static final String LV_CREATE_MENU_ITEM = "Create LV in VG ";
    /** Description of the create LV menu item. */
    private static final String LV_CREATE_MENU_DESCRIPTION =
                                              "Create a logical volume.";
    /** Name of the LV remove menu item. */
    private static final String LV_REMOVE_MENU_ITEM = "Remove LV";
    /** Description of the LV remove menu item. */
    private static final String LV_REMOVE_MENU_DESCRIPTION =
                                              "Remove the logical volume";
    /** Name of the LV resize menu item. */
    private static final String LV_RESIZE_MENU_ITEM = "Resize LV";
    /** Description of the LV resize menu item. */
    private static final String LV_RESIZE_MENU_DESCRIPTION =
                                              "Resize the logical volume";
    /** Name of the LV snapshot menu item. */
    private static final String LV_SNAPSHOT_MENU_ITEM = "Create LV Snapshot ";
    /** Description of the LV snapshot menu item. */
    private static final String LV_SNAPSHOT_MENU_DESCRIPTION =
                                "Create a snapshot of the logical volume.";
    /** "Proxy up" text for graph. */
    public static final String PROXY_UP = "Proxy Up";
    /** "Proxy down" text for graph. */
    private static final String PROXY_DOWN = "Proxy Down";
    /**
     * Prepares a new <code>BlockDevInfo</code> object.
     *
     * @param name
     *          name that will be shown in the tree
     * @param blockDevice
     *          block device this object represents
     * @param browser
     *          browser object of the host this device belongs to
     */
    public BlockDevInfo(final String name,
                        final BlockDevice blockDevice,
                        final Browser browser) {
        super(name, browser);
        setResource(blockDevice);
    }
/**
* Returns object of the other block device that is connected via drbd
* to this block device.
*/
public BlockDevInfo getOtherBlockDevInfo() {
final DrbdVolumeInfo dvi = drbdVolumeInfo;
if (dvi == null) {
return null;
}
return dvi.getOtherBlockDevInfo(this);
}
    /** Returns browser object of this info, narrowed to the host browser. */
    @Override
    public HostBrowser getBrowser() {
        return (HostBrowser) super.getBrowser();
    }
    /** Sets info panel of this block device; pass {@code null} to drop the
        cached panel so it is recreated on next access. TODO: explain why. */
    void setInfoPanel(final JComponent infoPanel) {
        this.infoPanel = infoPanel;
    }
    /**
     * Remove this block device: clears its DRBD meta-disk parameters,
     * removes it from the tree (unless in test mode) and drops the cached
     * info panel.
     *
     * TODO: check this
     *
     * @param testOnly whether this is only a test run
     */
    @Override
    public void removeMyself(final boolean testOnly) {
        getBlockDevice().setValue(DRBD_MD_PARAM, null);
        getBlockDevice().setValue(DRBD_MD_INDEX_PARAM, null);
        super.removeMyself(testOnly);
        if (!testOnly) {
            removeNode();
        }
        /* drop cached panel so that it is rebuilt if needed again */
        infoPanel = null;
    }
    /** Returns host on which is this block device. */
    public Host getHost() {
        return getBrowser().getHost();
    }
    /** Returns block device icon for the menu (same icon regardless of
        test mode). */
    @Override
    public ImageIcon getMenuIcon(final boolean testOnly) {
        return BlockDevInfo.HARDDISK_ICON;
    }
/** Returns info of this block device as string. */
@Override
String getInfo() {
final StringBuilder ret = new StringBuilder(120);
ret.append("Host : ");
ret.append(getHost().getName());
ret.append("\nDevice : ");
ret.append(getBlockDevice().getName());
ret.append("\nMeta disk : ");
ret.append(getBlockDevice().isDrbdMetaDisk());
ret.append("\nSize : ");
ret.append(getBlockDevice().getBlockSize());
ret.append(" blocks");
if (getBlockDevice().getMountedOn() == null) {
ret.append("\nnot mounted");
} else {
ret.append("\nMounted on : ");
ret.append(getBlockDevice().getMountedOn());
ret.append("\nType : ");
ret.append(getBlockDevice().getFsType());
if (getUsed() >= 0) {
ret.append("\nUsed: : ");
ret.append(getUsed());
ret.append('%');
}
}
if (getBlockDevice().isDrbd()) {
ret.append("\nConnection state: ");
ret.append(getBlockDevice().getConnectionState());
ret.append("\nNode state : ");
ret.append(getBlockDevice().getNodeState());
ret.append("\nDisk state : ");
ret.append(getBlockDevice().getDiskState());
ret.append('\n');
}
return ret.toString();
}
    /** Append hierarchy of block devices (PVs, their volume group and its
        logical volumes, recursing into DRBD backing devices) to the string
        builder using HTML; the device this tool tip belongs to is rendered
        in bold. */
    private void appendBlockDeviceHierarchy(final BlockDevice bd,
                                            final StringBuilder tt,
                                            final int shift) {
        String tab = "";
        for (int i = 0; i != shift; ++i) {
            tab += " ";
        }
        /* physical volumes */
        String vg = null;
        String selectedPV = null;
        if (bd.isVolumeGroupOnPhysicalVolume()) {
            vg = bd.getVolumeGroupOnPhysicalVolume();
            selectedPV = bd.getName();
        } else if (isLVM()) {
            vg = bd.getVolumeGroup();
        }
        if (vg != null) {
            for (final BlockDevice pv : getHost().getPhysicalVolumes(vg)) {
                /* NOTE(review): pv is a BlockDevice compared against a
                   String name — relies on BlockDevice.equals() matching by
                   name; verify. */
                if (pv.equals(selectedPV)) {
                    tt.append("<b>");
                    tt.append(tab + pv);
                    tt.append("</b>");
                } else {
                    tt.append(tab + pv);
                }
                tt.append('\n');
            }
        }
        /* volume groups */
        String selectedLV = null;
        if (vg != null) {
            if (bd.isVolumeGroupOnPhysicalVolume()) {
                tt.append("<b>");
                tt.append(" " + tab + vg);
                tt.append("</b>\n");
            } else if (isLVM()) {
                tt.append(" " + tab);
                tt.append(vg);
                tt.append('\n');
                selectedLV = bd.getName();
            }
            final Set<String> lvs =
                          getHost().getLogicalVolumesFromVolumeGroup(vg);
            if (lvs != null) {
                for (final String lv : lvs) {
                    tt.append("  " + tab);
                    final String lvName = "/dev/" + vg + "/" + lv;
                    if (lvName.equals(selectedLV)) {
                        if (bd.isDrbd()) {
                            /* the LV backs a DRBD device: recurse into it
                               instead of highlighting the LV itself */
                            tt.append(lv);
                            tt.append("\n");
                            final BlockDevice drbdBD = bd.getDrbdBlockDevice();
                            if (drbdBD != null) {
                                appendBlockDeviceHierarchy(drbdBD,
                                                           tt,
                                                           shift + 3);
                            }
                        } else {
                            tt.append("<b>");
                            tt.append(lv);
                            tt.append("</b>\n");
                        }
                    } else {
                        tt.append(lv);
                        tt.append('\n');
                    }
                }
            }
        } else {
            /* not LVM: either recurse into the DRBD backing device or
               highlight this plain device */
            final BlockDevice drbdBD = bd.getDrbdBlockDevice();
            if (drbdBD != null) {
                tt.append(tab + bd.getName());
                tt.append('\n');
                appendBlockDeviceHierarchy(drbdBD, tt, shift + 1);
            } else {
                tt.append("<b>");
                tt.append(tab + bd.getName());
                tt.append("</b>\n");
            }
        }
    }
    /** Returns tool tip for this block device in the graph: the device
        hierarchy, meta-disk usage and — when DRBD status is available —
        the cs/ro/ds states. */
    @Override
    public String getToolTipForGraph(final boolean testOnly) {
        final StringBuilder tt = new StringBuilder(60);
        final BlockDevice bd = getBlockDevice();
        tt.append("<pre>");
        appendBlockDeviceHierarchy(bd, tt, 0);
        tt.append("</pre>");
        if (bd.isDrbdMetaDisk()) {
            tt.append(" (Meta Disk)\n");
            for (final BlockDevice mb
                         : getBlockDevice().getMetaDiskOfBlockDevices()) {
                tt.append("&nbsp;&nbsp;of ");
                tt.append(mb.getName());
                tt.append('\n');
            }
        }
        if (bd.isDrbd()) {
            if (getHost().isDrbdStatus()) {
                /* substitute "not available" for any missing state */
                String cs = bd.getConnectionState();
                String st = bd.getNodeState();
                String ds = bd.getDiskState();
                if (cs == null) {
                    cs = "not available";
                }
                if (st == null) {
                    st = "not available";
                }
                if (ds == null) {
                    ds = "not available";
                }
                tt.append("\n<table><tr><td><b>cs:</b></td><td>");
                tt.append(cs);
                tt.append("</td></tr><tr><td><b>ro:</b></td><td>");
                tt.append(st);
                tt.append("</td></tr><tr><td><b>ds:</b></td><td>");
                tt.append(ds);
                tt.append("</td></tr></table>");
            } else {
                tt.append('\n');
                tt.append(Tools.getString("HostBrowser.Hb.NoInfoAvailable"));
            }
        }
        return tt.toString();
    }
    /** Creates the per-host part of the drbd.conf section for this device.
     *
     * @param resource resource name, used only in error messages
     * @param drbdDevice DRBD device path (e.g. /dev/drbd0)
     * @param volumesAvailable whether the DRBD version supports volumes;
     *        only affects the indentation depth of the generated config
     * @return the device/disk/meta-disk config lines
     * @throws Exceptions.DrbdConfigException if the DRBD device or the
     *         backing block device name is not defined
     */
    String drbdBDConfig(final String resource,
                        final String drbdDevice,
                        final boolean volumesAvailable)
            throws Exceptions.DrbdConfigException {
        if (drbdDevice == null) {
            throw new Exceptions.DrbdConfigException(
                                    "Drbd device not defined for host "
                                    + getHost().getName()
                                    + " (" + resource + ")");
        }
        if (getBlockDevice().getName() == null) {
            throw new Exceptions.DrbdConfigException(
                                    "Block device not defined for host "
                                    + getHost().getName()
                                    + " (" + resource + ")");
        }
        final StringBuilder config = new StringBuilder(120);
        String tabs;
        if (volumesAvailable) {
            tabs = "\t\t\t";
        } else {
            tabs = "\t\t";
        }
        config.append(tabs + "device\t\t");
        config.append(drbdDevice);
        config.append(";\n" + tabs + "disk\t\t");
        config.append(getBlockDevice().getName());
        config.append(";\n" + tabs);
        config.append(getBlockDevice().getMetaDiskString(
                                   getComboBoxValue(DRBD_MD_PARAM),
                                   getComboBoxValue(DRBD_MD_INDEX_PARAM)));
        config.append(';');
        return config.toString();
    }
    /** Sets whether this block device is drbd. */
    void setDrbd(final boolean drbd) {
        getBlockDevice().setDrbd(drbd);
    }
    /** Returns section of this parameter. */
    @Override
    protected String getSection(final String param) {
        return getBlockDevice().getSection(param);
    }
    /** Returns possible choices of this parameter. */
    @Override
    protected Object[] getPossibleChoices(final String param) {
        return getBlockDevice().getPossibleChoices(param);
    }
    /** Returns default value of this parameter.
        NOTE(review): unlike the sibling methods this one carries no
        {@code @Override} — confirm whether it is meant to override a
        superclass method. */
    protected Object getDefaultValue(final String param) {
        return "<select>";
    }
/** Returns combobox for this parameter. */
@Override
protected Widget createWidget(final String param,
final String prefix,
final int width) {
Widget paramWi;
if (DRBD_MD_INDEX_PARAM.equals(param)) {
final Widget gwi = super.createWidget(param, prefix, width);
paramWi = gwi;
//SwingUtilities.invokeLater(new Runnable() {
// @Override
// public void run() {
// gwi.setAlwaysEditable(true);
// }
//});
} else {
final Widget gwi = super.createWidget(param, prefix, width);
paramWi = gwi;
SwingUtilities.invokeLater(new Runnable() {
@Override
public void run() {
gwi.setEditable(false);
}
});
}
return paramWi;
}
    /** Returns true if a parameter is correct.  As a side effect, choosing
        an "internal" meta-disk forces the index widgets to the flexible
        type and disables them; the result is also cached in
        paramCorrectValueMap. */
    @Override
    protected boolean checkParam(final String param, String value) {
        boolean ret = true;
        if (value == null) {
            value = "";
        }
        if ("".equals(value) && isRequired(param)) {
            ret = false;
        } else if (DRBD_MD_PARAM.equals(param)) {
            if (infoPanel != null) {
                if (!getHost().isServerStatusLatch()) {
                    final boolean internal = "internal".equals(value);
                    final Widget ind = getWidget(DRBD_MD_INDEX_PARAM, null);
                    final Widget indW = getWidget(DRBD_MD_INDEX_PARAM,
                                                  "wizard");
                    if (internal) {
                        ind.setValue(DRBD_MD_TYPE_FLEXIBLE);
                        if (indW != null) {
                            indW.setValue(DRBD_MD_TYPE_FLEXIBLE);
                        }
                    }
                    SwingUtilities.invokeLater(new Runnable() {
                        @Override
                        public void run() {
                            ind.setEnabled(!internal);
                        }
                    });
                    if (indW != null) {
                        SwingUtilities.invokeLater(new Runnable() {
                            @Override
                            public void run() {
                                indW.setEnabled(!internal);
                            }
                        });
                    }
                }
            }
        } else if (DRBD_MD_INDEX_PARAM.equals(param)) {
            /* a numeric index must not collide with an already used port */
            if (getBrowser().getUsedPorts().contains(value)
                && !value.equals(getBlockDevice().getValue(param))) {
                ret = false;
            }
            /* only digits are allowed, except the "flexible" keyword */
            final Pattern p = Pattern.compile(".*\\D.*");
            final Matcher m = p.matcher(value);
            if (m.matches() && !DRBD_MD_TYPE_FLEXIBLE.equals(value)) {
                ret = false;
            }
        }
        /* NOTE(review): remove before put is redundant for a plain Map;
           presumably kept for historical reasons — verify the map type. */
        paramCorrectValueMap.remove(param);
        paramCorrectValueMap.put(param, ret);
        return ret;
    }
    /** Returns whether this parameter is required; all parameters of a
        block device are. */
    @Override
    protected boolean isRequired(final String param) {
        return true;
    }
    /** Returns whether this parameter is advanced; none are. */
    @Override
    protected boolean isAdvanced(final String param) {
        return false;
    }
    /** Returns access type of this parameter; changing meta-disk settings
        requires admin access. */
    @Override
    protected ConfigData.AccessType getAccessType(final String param) {
        return ConfigData.AccessType.ADMIN;
    }
    /** Whether the parameter should be enabled; null means always. */
    @Override
    protected String isEnabled(final String param) {
        return null;
    }
    /** Whether the parameter should be enabled only in advanced mode. */
    @Override
    protected boolean isEnabledOnlyInAdvancedMode(final String param) {
        return false;
    }
    /** Returns whether this type is integer. */
    @Override
    protected boolean isInteger(final String param) {
        return false;
    }
    /** Returns whether this type is a label. */
    @Override
    protected boolean isLabel(final String param) {
        return false;
    }
    /** Returns whether this parameter is of a time type. */
    @Override
    protected boolean isTimeType(final String param) {
        /* not required */
        return false;
    }
    /** Returns whether this parameter is a checkbox. */
    @Override
    protected boolean isCheckBox(final String param) {
        return false;
    }
    /** Returns type of this parameter; null means generic. */
    @Override
    protected String getParamType(final String param) {
        return null;
    }
    /** Returns the regexp of the parameter; none is used. */
    @Override
    protected String getParamRegexp(final String param) {
        return null;
    }
    /** Returns possible choices for the parameter: for the meta-disk the
        "internal" entry plus all available block devices; for the index
        the "flexible" type plus a window of eleven numeric indices around
        the saved one.  Also records the default value on the block
        device. */
    @Override
    protected Object[] getParamPossibleChoices(final String param) {
        if (DRBD_MD_PARAM.equals(param)) {
            /* meta disk */
            final StringInfo internalMetaDisk =
                    new StringInfo(Tools.getString(
                                        "HostBrowser.MetaDisk.Internal"),
                                   "internal",
                                   getBrowser());
            final String defaultMetaDiskString = internalMetaDisk.toString();
            getBrowser().lockBlockDevInfosRead();
            @SuppressWarnings("unchecked")
            final Info[] blockDevices = getAvailableBlockDevicesForMetaDisk(
                                internalMetaDisk,
                                getName(),
                                getBrowser().getBlockDevicesNode().children());
            getBrowser().unlockBlockDevInfosRead();
            getBlockDevice().setDefaultValue(DRBD_MD_PARAM,
                                             defaultMetaDiskString);
            return blockDevices;
        } else if (DRBD_MD_INDEX_PARAM.equals(param)) {
            String defaultMetaDiskIndex = getBlockDevice().getValue(
                                                       DRBD_MD_INDEX_PARAM);
            if ("internal".equals(defaultMetaDiskIndex)) {
                defaultMetaDiskIndex =
                         Tools.getString("HostBrowser.MetaDisk.Internal");
            }
            String[] indeces = new String[11];
            int index = 0;
            if (defaultMetaDiskIndex == null) {
                defaultMetaDiskIndex = DRBD_MD_TYPE_FLEXIBLE;
            } else if (!DRBD_MD_TYPE_FLEXIBLE.equals(defaultMetaDiskIndex)) {
                /* center the offered indices around the saved one */
                index = Integer.valueOf(defaultMetaDiskIndex) - 5;
                if (index < 0) {
                    index = 0;
                }
            }
            indeces[0] = DRBD_MD_TYPE_FLEXIBLE;
            for (int i = 1; i < 11; i++) {
                indeces[i] = Integer.toString(index);
                index++;
            }
            getBlockDevice().setDefaultValue(DRBD_MD_INDEX_PARAM,
                                             DRBD_MD_TYPE_FLEXIBLE);
            return indeces;
        }
        return null;
    }
    /** Returns default for this parameter, as stored on the block device. */
    @Override
    protected String getParamDefault(final String param) {
        return getBlockDevice().getDefaultValue(param);
    }
    /** Returns preferred value of this parameter. */
    @Override
    protected String getParamPreferred(final String param) {
        return getBlockDevice().getPreferredValue(param);
    }
    /** Return whether the value is correct from the cache; false if the
        parameter was never checked. */
    @Override
    protected boolean checkParamCache(final String param) {
        final Boolean cv = paramCorrectValueMap.get(param);
        if (cv == null) {
            return false;
        }
        return cv.booleanValue();
    }
/** Returns block devices that are available for drbd meta-disk. */
protected Info[] getAvailableBlockDevicesForMetaDisk(
final Info defaultValue,
final String serviceName,
final Enumeration<DefaultMutableTreeNode> e) {
final List<Info> list = new ArrayList<Info>();
final String savedMetaDisk = getBlockDevice().getValue(DRBD_MD_PARAM);
if (defaultValue != null) {
list.add(defaultValue);
}
while (e.hasMoreElements()) {
final BlockDevInfo bdi =
(BlockDevInfo) e.nextElement().getUserObject();
final BlockDevice bd = bdi.getBlockDevice();
if (bd.toString().equals(savedMetaDisk)
|| (!bd.isDrbd() && !bd.isUsedByCRM() && !bd.isMounted())) {
list.add(bdi);
}
}
return list.toArray(new Info[list.size()]);
}
    /** DRBD attach: re-attach the backing disk of this volume. */
    void attach(final boolean testOnly) {
        DRBD.attach(getHost(),
                    drbdVolumeInfo.getDrbdResourceInfo().getName(),
                    drbdVolumeInfo.getName(),
                    testOnly);
    }
    /** DRBD detach: detach the backing disk of this volume. */
    void detach(final boolean testOnly) {
        DRBD.detach(getHost(),
                    drbdVolumeInfo.getDrbdResourceInfo().getName(),
                    drbdVolumeInfo.getName(),
                    testOnly);
    }
    /** DRBD connect; applies to the whole resource (volume is null). */
    void connect(final boolean testOnly) {
        DRBD.connect(getHost(),
                     drbdVolumeInfo.getDrbdResourceInfo().getName(),
                     null,
                     testOnly);
    }
    /** DRBD disconnect; applies to the whole resource (volume is null). */
    void disconnect(final boolean testOnly) {
        DRBD.disconnect(getHost(),
                        drbdVolumeInfo.getDrbdResourceInfo().getName(),
                        null,
                        testOnly);
    }
    /** DRBD pause sync. */
    void pauseSync(final boolean testOnly) {
        DRBD.pauseSync(getHost(),
                       drbdVolumeInfo.getDrbdResourceInfo().getName(),
                       drbdVolumeInfo.getName(),
                       testOnly);
    }
    /** DRBD resume sync. */
    void resumeSync(final boolean testOnly) {
        DRBD.resumeSync(getHost(),
                        drbdVolumeInfo.getDrbdResourceInfo().getName(),
                        drbdVolumeInfo.getName(),
                        testOnly);
    }
    /** DRBD up command. */
    void drbdUp(final boolean testOnly) {
        DRBD.up(getHost(),
                drbdVolumeInfo.getDrbdResourceInfo().getName(),
                drbdVolumeInfo.getName(),
                testOnly);
    }
    /** Sets this drbd block device to the primary state. */
    void setPrimary(final boolean testOnly) {
        DRBD.setPrimary(getHost(),
                        drbdVolumeInfo.getDrbdResourceInfo().getName(),
                        drbdVolumeInfo.getName(),
                        testOnly);
    }
    /** Sets this drbd block device to the secondary state. */
    public void setSecondary(final boolean testOnly) {
        DRBD.setSecondary(getHost(),
                          drbdVolumeInfo.getDrbdResourceInfo().getName(),
                          drbdVolumeInfo.getName(),
                          testOnly);
    }
    /** Initializes drbd block device (creates meta-data). */
    void initDrbd(final boolean testOnly) {
        DRBD.initDrbd(getHost(),
                      drbdVolumeInfo.getDrbdResourceInfo().getName(),
                      drbdVolumeInfo.getName(),
                      testOnly);
    }
    /** Make filesystem of the given type on the DRBD device. */
    public void makeFilesystem(final String filesystem,
                               final boolean testOnly) {
        DRBD.makeFilesystem(getHost(),
                            getDrbdVolumeInfo().getDevice(),
                            filesystem,
                            testOnly);
    }
/** Initialize a physical volume. */
public boolean pvCreate(final boolean testOnly) {
String device;
if (getBlockDevice().isDrbd()) {
device = drbdVolumeInfo.getDevice();
} else {
device = getBlockDevice().getName();
}
final boolean ret = LVM.pvCreate(getHost(), device, testOnly);
if (ret) {
getBlockDevice().setVolumeGroupOnPhysicalVolume("");
}
return ret;
}
/** Remove a physical volume. */
public boolean pvRemove(final boolean testOnly) {
String device;
if (getBlockDevice().isDrbd()) {
device = drbdVolumeInfo.getDevice();
} else {
device = getBlockDevice().getName();
}
final boolean ret = LVM.pvRemove(getHost(), device, testOnly);
if (ret) {
if (getBlockDevice().isDrbd()) {
getBlockDevice().getDrbdBlockDevice()
.setVolumeGroupOnPhysicalVolume(null);
} else {
getBlockDevice().setVolumeGroupOnPhysicalVolume(null);
}
}
return ret;
}
    /** Remove this logical volume.
        @return true if lvremove succeeded */
    public boolean lvRemove(final boolean testOnly) {
        final String device = getBlockDevice().getName();
        return LVM.lvRemove(getHost(), device, testOnly);
    }
    /** Make a snapshot of this logical volume.
        @param snapshotName name of the snapshot to create
        @param size size of the snapshot
        @return true if lvcreate succeeded */
    public boolean lvSnapshot(final String snapshotName,
                              final String size,
                              final boolean testOnly) {
        final String device = getBlockDevice().getName();
        return LVM.lvSnapshot(getHost(), snapshotName, device, size, testOnly);
    }
    /** Skip initial full sync of this volume. */
    public void skipInitialFullSync(final boolean testOnly) {
        DRBD.skipInitialFullSync(getHost(),
                                 drbdVolumeInfo.getDrbdResourceInfo().getName(),
                                 drbdVolumeInfo.getName(),
                                 testOnly);
    }
    /** Force this block device to become primary. */
    public void forcePrimary(final boolean testOnly) {
        DRBD.forcePrimary(getHost(),
                          drbdVolumeInfo.getDrbdResourceInfo().getName(),
                          drbdVolumeInfo.getName(),
                          testOnly);
    }
    /** Invalidate the block device, forcing a full resync from the peer. */
    void invalidateBD(final boolean testOnly) {
        DRBD.invalidate(getHost(),
                        drbdVolumeInfo.getDrbdResourceInfo().getName(),
                        drbdVolumeInfo.getName(),
                        testOnly);
    }
    /** Discard the data on this node; applies to the whole resource
        (volume is null). */
    void discardData(final boolean testOnly) {
        DRBD.discardData(getHost(),
                         drbdVolumeInfo.getDrbdResourceInfo().getName(),
                         null,
                         testOnly);
    }
    /** Start on-line verification of this volume. */
    void verify(final boolean testOnly) {
        DRBD.verify(getHost(),
                    drbdVolumeInfo.getDrbdResourceInfo().getName(),
                    drbdVolumeInfo.getName(),
                    testOnly);
    }
    /** Resize DRBD to the size of the backing device.
        @return true if the resize succeeded */
    public boolean resizeDrbd(final boolean testOnly) {
        return DRBD.resize(getHost(),
                           drbdVolumeInfo.getDrbdResourceInfo().getName(),
                           drbdVolumeInfo.getName(),
                           testOnly);
    }
    /** Returns the graphical view: the DRBD graph, with this device
        selected when it is a DRBD device. */
    @Override
    public JPanel getGraphicalView() {
        if (getBlockDevice().isDrbd()) {
            getBrowser().getDrbdGraph().getDrbdInfo().setSelectedNode(this);
        }
        return getBrowser().getDrbdGraph().getDrbdInfo().getGraphicalView();
    }
    /** Set the terminal panel to the one of this device's host. */
    @Override
    protected void setTerminalPanel() {
        if (getHost() != null) {
            Tools.getGUIData().setTerminalPanel(getHost().getTerminalPanel());
        }
    }
    /** Returns the info panel (delegates to getInfoPanelBD). */
    @Override
    public JComponent getInfoPanel() {
        return getInfoPanelBD();
    }
    /** Returns all parameters: the meta-disk and its index. */
    @Override
    public String[] getParametersFromXML() {
        final String[] params = {
                            DRBD_MD_PARAM,
                            DRBD_MD_INDEX_PARAM,
                          };
        return params;
    }
    /** Apply all fields: store the chosen meta-disk (or null for
        "internal") and index on the block device object and refresh the
        apply buttons.  No-op in test mode.
        @param testOnly whether this is only a test run */
    public void apply(final boolean testOnly) {
        if (!testOnly) {
            final String[] params = getParametersFromXML();
            Tools.invokeAndWait(new Runnable() {
                @Override
                public void run() {
                    getApplyButton().setEnabled(false);
                    getRevertButton().setEnabled(false);
                }
            });
            /* the info panel must exist before widget values are read */
            getInfoPanel();
            waitForInfoPanel();
            if (getBlockDevice().getMetaDisk() != null) {
                getBlockDevice().getMetaDisk().removeMetadiskOfBlockDevice(
                                                             getBlockDevice());
            }
            getBlockDevice().setNew(false);
            storeComboBoxValues(params);
            final Object o = getWidget(DRBD_MD_PARAM, null).getValue();
            if (Tools.isStringInfoClass(o)) {
                getBlockDevice().setMetaDisk(null); /* internal */
            } else {
                final BlockDevice metaDisk =
                                        ((BlockDevInfo) o).getBlockDevice();
                getBlockDevice().setMetaDisk(metaDisk);
            }
            getBrowser().getDrbdGraph().getDrbdInfo().setAllApplyButtons();
        }
    }
    /** Returns block device panel, building it on first call and caching
        it in infoPanel afterwards.  The apply button runs a DRBD config
        apply; hovering over it runs the same apply as a dry-run test and
        shows the result as a tool tip. */
    JComponent getInfoPanelBD() {
        if (infoPanel != null) {
            return infoPanel;
        }
        final BlockDevInfo thisClass = this;
        final ButtonCallback buttonCallback = new ButtonCallback() {
            private volatile boolean mouseStillOver = false;
            /**
             * Whether the whole thing should be enabled.
             */
            @Override
            public boolean isEnabled() {
                return true;
            }
            @Override
            public void mouseOut() {
                if (!isEnabled()) {
                    return;
                }
                mouseStillOver = false;
                final DrbdGraph drbdGraph = getBrowser().getDrbdGraph();
                drbdGraph.stopTestAnimation(getApplyButton());
                getApplyButton().setToolTipText(null);
            }
            @Override
            public void mouseOver() {
                if (!isEnabled()) {
                    return;
                }
                mouseStillOver = true;
                getApplyButton().setToolTipText(Tools.getString(
                                         "ClusterBrowser.StartingDRBDtest"));
                getApplyButton().setToolTipBackground(Tools.getDefaultColor(
                                 "ClusterBrowser.Test.Tooltip.Background"));
                /* debounce: only start the test if the mouse stayed */
                Tools.sleep(250);
                if (!mouseStillOver) {
                    return;
                }
                mouseStillOver = false;
                final CountDownLatch startTestLatch = new CountDownLatch(1);
                final DrbdGraph drbdGraph = getBrowser().getDrbdGraph();
                drbdGraph.startTestAnimation(getApplyButton(), startTestLatch);
                getBrowser().drbdtestLockAcquire();
                thisClass.setDRBDtestData(null);
                apply(true);
                final Map<Host, String> testOutput =
                                         new LinkedHashMap<Host, String>();
                try {
                    getBrowser().getDrbdGraph().getDrbdInfo().createDrbdConfig(
                                                                      true);
                    for (final Host h
                                    : getHost().getCluster().getHostsArray()) {
                        DRBD.adjust(h, DRBD.ALL, null, true);
                        testOutput.put(h, DRBD.getDRBDtest());
                    }
                } catch (Exceptions.DrbdConfigException dce) {
                    Tools.appError("config failed");
                }
                final DRBDtestData dtd = new DRBDtestData(testOutput);
                getApplyButton().setToolTipText(dtd.getToolTip());
                thisClass.setDRBDtestData(dtd);
                getBrowser().drbdtestLockRelease();
                startTestLatch.countDown();
            }
        };
        initApplyButton(buttonCallback);
        final JPanel mainPanel = new JPanel();
        mainPanel.setBackground(HostBrowser.PANEL_BACKGROUND);
        mainPanel.setLayout(new BoxLayout(mainPanel, BoxLayout.Y_AXIS));
        final JPanel buttonPanel = new JPanel(new BorderLayout());
        buttonPanel.setBackground(HostBrowser.BUTTON_PANEL_BACKGROUND);
        buttonPanel.setMinimumSize(new Dimension(0, 50));
        buttonPanel.setPreferredSize(new Dimension(0, 50));
        buttonPanel.setMaximumSize(new Dimension(Short.MAX_VALUE, 50));
        final JPanel optionsPanel = new JPanel();
        optionsPanel.setBackground(HostBrowser.PANEL_BACKGROUND);
        optionsPanel.setLayout(new BoxLayout(optionsPanel, BoxLayout.Y_AXIS));
        optionsPanel.setAlignmentX(Component.LEFT_ALIGNMENT);
        /* Actions */
        buttonPanel.add(getActionsButton(), BorderLayout.EAST);
        if (getBlockDevice().isDrbd()) {
            final String[] params = getParametersFromXML();
            addParams(optionsPanel,
                      params,
                      Tools.getDefaultSize("HostBrowser.DrbdDevLabelWidth"),
                      Tools.getDefaultSize("HostBrowser.DrbdDevFieldWidth"),
                      null);
            /* apply button */
            getApplyButton().addActionListener(new ActionListener() {
                @Override
                public void actionPerformed(final ActionEvent e) {
                    final Thread thread = new Thread(new Runnable() {
                        @Override
                        public void run() {
                            Tools.invokeAndWait(new Runnable() {
                                @Override
                                public void run() {
                                    getApplyButton().setEnabled(false);
                                    getRevertButton().setEnabled(false);
                                }
                            });
                            getBrowser().getClusterBrowser().drbdStatusLock();
                            try {
                                getBrowser().getDrbdGraph().getDrbdInfo()
                                              .createDrbdConfig(false);
                                for (final Host h
                                    : getHost().getCluster().getHostsArray()) {
                                    DRBD.adjust(h, DRBD.ALL, null, false);
                                }
                            /* NOTE: this 'e' shadows the captured
                               ActionEvent above */
                            } catch (Exceptions.DrbdConfigException e) {
                                getBrowser()
                                       .getClusterBrowser()
                                       .drbdStatusUnlock();
                                Tools.appError("config failed");
                                return;
                            }
                            apply(false);
                            getBrowser().getClusterBrowser().drbdStatusUnlock();
                        }
                    });
                    thread.start();
                }
            });
            getRevertButton().addActionListener(
                new ActionListener() {
                    @Override
                    public void actionPerformed(final ActionEvent e) {
                        final Thread thread = new Thread(new Runnable() {
                            @Override
                            public void run() {
                                revert();
                            }
                        });
                        thread.start();
                    }
                }
            );
            addApplyButton(buttonPanel);
            addRevertButton(buttonPanel);
        }
        /* info */
        final Font f = new Font("Monospaced",
                                Font.PLAIN,
                                Tools.getConfigData().scaled(12));
        final JPanel riaPanel = new JPanel();
        riaPanel.setBackground(HostBrowser.PANEL_BACKGROUND);
        riaPanel.setAlignmentX(Component.LEFT_ALIGNMENT);
        riaPanel.add(super.getInfoPanel());
        mainPanel.add(riaPanel);
        mainPanel.add(optionsPanel);
        final JPanel newPanel = new JPanel();
        newPanel.setBackground(HostBrowser.PANEL_BACKGROUND);
        newPanel.setLayout(new BoxLayout(newPanel, BoxLayout.Y_AXIS));
        newPanel.add(buttonPanel);
        newPanel.add(new JScrollPane(mainPanel));
        infoPanel = newPanel;
        infoPanelDone();
        setApplyButtons(null, getParametersFromXML());
        return infoPanel;
    }
    /** Select automatically in the tree menu only while the info panel
        does not exist yet. TODO: dead code? */
    @Override
    boolean selectAutomaticallyInTreeMenu() {
        return infoPanel == null;
    }
    /** Sets drbd volume for this block device; null detaches it. */
    void setDrbdVolumeInfo(final DrbdVolumeInfo drbdVolumeInfo) {
        this.drbdVolumeInfo = drbdVolumeInfo;
    }
    /** Returns drbd volume info in which this block device is member. */
    public DrbdVolumeInfo getDrbdVolumeInfo() {
        return drbdVolumeInfo;
    }
    /** Returns block device resource object. */
    public BlockDevice getBlockDevice() {
        return (BlockDevice) getResource();
    }
    /** Removes this block device from drbd data structures. */
    public void removeFromDrbd() {
        setDrbd(false);
        getBlockDevice().setDrbdBlockDevice(null);
        setDrbdVolumeInfo(null);
    }
    /** Returns short description of the parameter. */
    @Override
    protected String getParamShortDesc(final String param) {
        return Tools.getString(param);
    }
    /** Returns long description of the parameter. */
    @Override
    protected String getParamLongDesc(final String param) {
        return Tools.getString(param + ".Long");
    }
    /** Returns 'add drbd resource' menu item that connects this device
        with the given peer device as a new DRBD volume; both cached info
        panels are reset so they are rebuilt with the DRBD fields. */
    private MyMenuItem addDrbdResourceMenuItem(final BlockDevInfo oBdi,
                                               final boolean testOnly) {
        final BlockDevInfo thisClass = this;
        return new MyMenuItem(oBdi.toString(),
                              null,
                              null,
                              new AccessMode(ConfigData.AccessType.ADMIN,
                                             false),
                              new AccessMode(ConfigData.AccessType.OP, false)) {
            private static final long serialVersionUID = 1L;
            @Override
            public void action() {
                final DrbdInfo drbdInfo =
                                    getBrowser().getDrbdGraph().getDrbdInfo();
                cleanup();
                setInfoPanel(null);
                oBdi.cleanup();
                oBdi.setInfoPanel(null);
                drbdInfo.addDrbdVolume(thisClass,
                                       oBdi,
                                       true,
                                       testOnly);
            }
        };
    }
    /** Returns 'PV create' menu item; visible only on devices that are
        not yet LVM or physical volumes, and enabled for DRBD devices only
        while primary. */
    private MyMenuItem getPVCreateItem() {
        final BlockDevInfo thisBDI = this;
        return new MyMenuItem(PV_CREATE_MENU_ITEM,
                              null,
                              PV_CREATE_MENU_DESCRIPTION,
                              new AccessMode(ConfigData.AccessType.OP, false),
                              new AccessMode(ConfigData.AccessType.OP, false)) {
            private static final long serialVersionUID = 1L;
            @Override
            public boolean visiblePredicate() {
                return !isLVM()
                       && !getBlockDevice().isPhysicalVolume()
                       && !getBlockDevice().isDrbdPhysicalVolume();
            }
            @Override
            public String enablePredicate() {
                if (getBlockDevice().isDrbd()
                    && !getBlockDevice().isPrimary()) {
                    return "must be primary";
                }
                return null;
            }
            @Override
            public void action() {
                /* loop until the dialog is either cancelled or finished */
                final PVCreate pvCreate = new PVCreate(thisBDI);
                while (true) {
                    pvCreate.showDialog();
                    if (pvCreate.isPressedCancelButton()) {
                        pvCreate.cancelDialog();
                        return;
                    } else if (pvCreate.isPressedFinishButton()) {
                        break;
                    }
                }
            }
        };
    }
    /** Returns 'PV remove' menu item; visible only on a physical volume
        without a volume group (for DRBD devices, only while primary). */
    private MyMenuItem getPVRemoveItem() {
        final BlockDevInfo thisBDI = this;
        return new MyMenuItem(PV_REMOVE_MENU_ITEM,
                              null,
                              PV_REMOVE_MENU_DESCRIPTION,
                              new AccessMode(ConfigData.AccessType.OP, false),
                              new AccessMode(ConfigData.AccessType.OP, false)) {
            private static final long serialVersionUID = 1L;
            @Override
            public boolean visiblePredicate() {
                /* for DRBD devices inspect the backing device */
                BlockDevice bd;
                if (getBlockDevice().isDrbd()) {
                    if (!getBlockDevice().isPrimary()) {
                        return false;
                    }
                    bd = getBlockDevice().getDrbdBlockDevice();
                    if (bd == null) {
                        return false;
                    }
                } else {
                    bd = getBlockDevice();
                }
                return bd.isPhysicalVolume()
                       && !bd.isVolumeGroupOnPhysicalVolume();
            }
            @Override
            public String enablePredicate() {
                if (getBlockDevice().isDrbd()
                    && !getBlockDevice().isDrbdPhysicalVolume()) {
                    return "DRBD is on it";
                }
                return null;
            }
            @Override
            public void action() {
                /* loop until the dialog is either cancelled or finished */
                final PVRemove pvRemove = new PVRemove(thisBDI);
                while (true) {
                    pvRemove.showDialog();
                    if (pvRemove.isPressedCancelButton()) {
                        pvRemove.cancelDialog();
                        return;
                    } else if (pvRemove.isPressedFinishButton()) {
                        break;
                    }
                }
            }
        };
    }
/** Returns 'lv create' menu item. */
private MyMenuItem getVGCreateItem() {
final BlockDevInfo thisBDI = this;
return new MyMenuItem(
VG_CREATE_MENU_ITEM,
null,
VG_CREATE_MENU_DESCRIPTION,
new AccessMode(ConfigData.AccessType.OP, false),
new AccessMode(ConfigData.AccessType.OP, false)) {
private static final long serialVersionUID = 1L;
public boolean visiblePredicate() {
BlockDevice bd;
if (getBlockDevice().isDrbd()) {
if (!getBlockDevice().isPrimary()) {
return false;
}
bd = getBlockDevice().getDrbdBlockDevice();
if (bd == null) {
return false;
}
} else {
bd = getBlockDevice();
}
return bd.isPhysicalVolume()
&& !bd.isVolumeGroupOnPhysicalVolume();
}
public String enablePredicate() {
return null;
}
@Override
public void action() {
final VGCreate vgCreate = new VGCreate(getHost(), thisBDI);
while (true) {
vgCreate.showDialog();
if (vgCreate.isPressedCancelButton()) {
vgCreate.cancelDialog();
return;
} else if (vgCreate.isPressedFinishButton()) {
break;
}
}
}
};
}
    /** Returns 'VG remove' menu item; visible only on a PV that carries a
        volume group, and disabled while the VG still has logical
        volumes. */
    private MyMenuItem getVGRemoveItem() {
        final BlockDevInfo thisBDI = this;
        return new MyMenuItem(VG_REMOVE_MENU_ITEM,
                              null,
                              VG_REMOVE_MENU_DESCRIPTION,
                              new AccessMode(ConfigData.AccessType.OP, false),
                              new AccessMode(ConfigData.AccessType.OP, false)) {
            private static final long serialVersionUID = 1L;
            @Override
            public boolean visiblePredicate() {
                /* for DRBD devices inspect the backing device */
                BlockDevice bd;
                if (getBlockDevice().isDrbd()) {
                    if (!getBlockDevice().isPrimary()) {
                        return false;
                    }
                    bd = getBlockDevice().getDrbdBlockDevice();
                    if (bd == null) {
                        return false;
                    }
                } else {
                    bd = getBlockDevice();
                }
                return bd.isVolumeGroupOnPhysicalVolume();
            }
            @Override
            public String enablePredicate() {
                String vg;
                final BlockDevice bd = getBlockDevice();
                final BlockDevice drbdBD = bd.getDrbdBlockDevice();
                if (drbdBD == null) {
                    vg = bd.getVolumeGroupOnPhysicalVolume();
                } else {
                    vg = drbdBD.getVolumeGroupOnPhysicalVolume();
                }
                if (getHost().getLogicalVolumesFromVolumeGroup(vg) != null) {
                    return "has LV on it";
                }
                return null;
            }
            @Override
            public void action() {
                /* loop until the dialog is either cancelled or finished */
                final VGRemove vgRemove = new VGRemove(thisBDI);
                while (true) {
                    vgRemove.showDialog();
                    if (vgRemove.isPressedCancelButton()) {
                        vgRemove.cancelDialog();
                        return;
                    } else if (vgRemove.isPressedFinishButton()) {
                        break;
                    }
                }
            }
        };
    }
    /** Returns 'LV create' menu item; the volume group name is appended
        to the item text and refreshed on every update. Visible only when
        the device belongs to a known volume group. */
    private MyMenuItem getLVCreateItem() {
        String name = LV_CREATE_MENU_ITEM;
        final String vgName = getBlockDevice().getVolumeGroup();
        if (vgName != null) {
            name += vgName;
        }
        final BlockDevInfo thisClass = this;
        final MyMenuItem mi = new MyMenuItem(
                          name,
                          null,
                          LV_CREATE_MENU_DESCRIPTION,
                          new AccessMode(ConfigData.AccessType.OP, false),
                          new AccessMode(ConfigData.AccessType.OP, false)) {
            private static final long serialVersionUID = 1L;
            /** Returns the volume group of this device: its own VG (LV on
                VG) or the VG created on it (VG on PV). */
            private String getVolumeGroup() {
                BlockDevice bd;
                if (getBlockDevice().isDrbd()) {
                    bd = getBlockDevice().getDrbdBlockDevice();
                    if (bd == null) {
                        return null;
                    }
                } else {
                    bd = getBlockDevice();
                }
                final String vg = bd.getVolumeGroup();
                if (vg == null) {
                    /* vg on pv */
                    return bd.getVolumeGroupOnPhysicalVolume();
                } else {
                    /* lv on vg */
                    return vg;
                }
            }
            @Override
            public boolean visiblePredicate() {
                final String vg = getVolumeGroup();
                return vg != null
                       && !"".equals(vg)
                       && getHost().getVolumeGroupNames().contains(vg);
            }
            @Override
            public String enablePredicate() {
                return null;
            }
            @Override
            public void action() {
                /* loop until the dialog is either cancelled or finished */
                final LVCreate lvCreate = new LVCreate(
                                                getHost(),
                                                getVolumeGroup(),
                                                thisClass.getBlockDevice());
                while (true) {
                    lvCreate.showDialog();
                    if (lvCreate.isPressedCancelButton()) {
                        lvCreate.cancelDialog();
                        return;
                    } else if (lvCreate.isPressedFinishButton()) {
                        break;
                    }
                }
            }
            @Override
            public void update() {
                /* refresh the VG name in the item text */
                setText1(LV_CREATE_MENU_ITEM + getVolumeGroup());
                super.update();
            }
        };
        mi.setToolTipText(LV_CREATE_MENU_DESCRIPTION);
        return mi;
    }
/** Returns 'LV remove' menu item. */
private MyMenuItem getLVRemoveItem() {
return new MyMenuItem(LV_REMOVE_MENU_ITEM,
null,
LV_REMOVE_MENU_DESCRIPTION,
new AccessMode(ConfigData.AccessType.OP, false),
new AccessMode(ConfigData.AccessType.OP, false)) {
private static final long serialVersionUID = 1L;
@Override
public boolean predicate() {
return true;
}
@Override
public boolean visiblePredicate() {
return isLVM();
}
@Override
public String enablePredicate() {
if (getBlockDevice().isDrbd()) {
return "DRBD is on it";
}
return null;
}
@Override
public void action() {
if (Tools.confirmDialog(
"Remove Logical Volume",
"Remove logical volume and DESTROY all the data on it?",
"Remove",
"Cancel")) {
final boolean ret = lvRemove(false);
final Host host = getHost();
getBrowser().getClusterBrowser().updateHWInfo(host);
}
}
};
}
/** Returns 'LV remove' menu item. */
private MyMenuItem getLVResizeItem() {
final BlockDevInfo thisBDI = this;
return new MyMenuItem(LV_RESIZE_MENU_ITEM,
null,
LV_RESIZE_MENU_DESCRIPTION,
new AccessMode(ConfigData.AccessType.OP, false),
new AccessMode(ConfigData.AccessType.OP, false)) {
private static final long serialVersionUID = 1L;
public boolean visiblePredicate() {
return isLVM();
}
public String enablePredicate() {
return null;
}
@Override
public void action() {
final LVResize lvmrd = new LVResize(thisBDI);
while (true) {
lvmrd.showDialog();
if (lvmrd.isPressedCancelButton()) {
lvmrd.cancelDialog();
return;
} else if (lvmrd.isPressedFinishButton()) {
break;
}
}
}
};
}
/** Returns 'LV snapshot' menu item. */
private MyMenuItem getLVSnapshotItem() {
final BlockDevInfo thisBDI = this;
return new MyMenuItem(LV_SNAPSHOT_MENU_ITEM,
null,
LV_SNAPSHOT_MENU_DESCRIPTION,
new AccessMode(ConfigData.AccessType.OP, false),
new AccessMode(ConfigData.AccessType.OP, false)) {
private static final long serialVersionUID = 1L;
@Override
public boolean visiblePredicate() {
return isLVM();
}
@Override
public String enablePredicate() {
return null;
}
@Override
public void action() {
final LVSnapshot lvsd = new LVSnapshot(thisBDI);
while (true) {
lvsd.showDialog();
if (lvsd.isPressedCancelButton()) {
lvsd.cancelDialog();
return;
} else if (lvsd.isPressedFinishButton()) {
break;
}
}
}
};
}
/** Creates popup for the block device. */
@Override
public List<UpdatableItem> createPopup() {
final List<UpdatableItem> items = new ArrayList<UpdatableItem>();
final BlockDevInfo thisClass = this;
final boolean testOnly = false;
final MyMenu repMenuItem = new MyMenu(
Tools.getString("HostBrowser.Drbd.AddDrbdResource"),
new AccessMode(ConfigData.AccessType.ADMIN, false),
new AccessMode(ConfigData.AccessType.OP, false)) {
private static final long serialVersionUID = 1L;
@Override
public String enablePredicate() {
final DrbdXML dxml =
getBrowser().getClusterBrowser().getDrbdXML();
if (drbdVolumeInfo != null) {
return "it is already a drbd resouce";
} else if (!getHost().isConnected()) {
return Host.NOT_CONNECTED_STRING;
} else if (!getHost().isDrbdLoaded()) {
return "drbd is not loaded";
} else if (getBlockDevice().isMounted()) {
return "is mounted";
} else if (getBlockDevice().isVolumeGroupOnPhysicalVolume()) {
return "is volume group";
} else if (!getBlockDevice().isAvailable()) {
return "not available";
} else if (dxml.isDrbdDisabled()) {
return "disabled because of config";
}
return null;
}
@Override
public void update() {
super.update();
Cluster cluster = getHost().getCluster();
Host[] otherHosts = cluster.getHostsArray();
final List<MyMenu> hostMenus = new ArrayList<MyMenu>();
for (final Host oHost : otherHosts) {
if (oHost == getHost()) {
continue;
}
final MyMenu hostMenu = new MyMenu(oHost.getName(),
new AccessMode(
ConfigData.AccessType.ADMIN,
false),
new AccessMode(
ConfigData.AccessType.OP,
false)) {
private static final long serialVersionUID = 1L;
@Override
public String enablePredicate() {
final DrbdXML dxml =
getBrowser().getClusterBrowser().getDrbdXML();
if (!oHost.isConnected()) {
return Host.NOT_CONNECTED_STRING;
} else if (!oHost.isDrbdLoaded()) {
return "drbd is not loaded";
} else {
return null;
}
//return oHost.isConnected()
// && oHost.isDrbdLoaded();
}
@Override
public void update() {
super.update();
Tools.invokeAndWait(new Runnable() {
@Override
public void run() {
removeAll();
}
});
Set<BlockDevInfo> blockDevInfos =
oHost.getBrowser().getBlockDevInfosInSwing();
List<BlockDevInfo> blockDevInfosS =
new ArrayList<BlockDevInfo>();
for (final BlockDevInfo oBdi : blockDevInfos) {
if (oBdi.getName().equals(
getBlockDevice().getName())) {
blockDevInfosS.add(0, oBdi);
} else {
blockDevInfosS.add(oBdi);
}
}
for (final BlockDevInfo oBdi : blockDevInfosS) {
if (oBdi.getDrbdVolumeInfo() == null
&& oBdi.getBlockDevice().isAvailable()) {
add(addDrbdResourceMenuItem(oBdi,
testOnly));
}
if (oBdi.getName().equals(
getBlockDevice().getName())) {
addSeparator();
}
}
}
};
hostMenu.update();
hostMenus.add(hostMenu);
}
Tools.invokeAndWait(new Runnable() {
@Override
public void run() {
removeAll();
for (final MyMenu hostMenu : hostMenus) {
add(hostMenu);
}
}
});
}
};
items.add(repMenuItem);
/* PV Create */
items.add(getPVCreateItem());
/* PV Remove */
items.add(getPVRemoveItem());
/* VG Create */
items.add(getVGCreateItem());
/* VG Remove */
items.add(getVGRemoveItem());
/* LV Create */
items.add(getLVCreateItem());
/* LV Remove */
items.add(getLVRemoveItem());
/* LV Resize */
items.add(getLVResizeItem());
/* LV Snapshot */
items.add(getLVSnapshotItem());
/* attach / detach */
final MyMenuItem attachMenu =
new MyMenuItem(Tools.getString("HostBrowser.Drbd.Detach"),
NO_HARDDISK_ICON_LARGE,
Tools.getString("HostBrowser.Drbd.Detach.ToolTip"),
Tools.getString("HostBrowser.Drbd.Attach"),
HARDDISK_DRBD_ICON_LARGE,
Tools.getString("HostBrowser.Drbd.Attach.ToolTip"),
new AccessMode(ConfigData.AccessType.OP, true),
new AccessMode(ConfigData.AccessType.OP, false)) {
private static final long serialVersionUID = 1L;
@Override
public boolean predicate() {
return !getBlockDevice().isDrbd()
|| getBlockDevice().isAttached();
}
@Override
public boolean visiblePredicate() {
return getBlockDevice().isDrbd();
}
@Override
public String enablePredicate() {
if (!getBlockDevice().isDrbd()) {
return NO_DRBD_RESOURCE_STRING;
}
if (!Tools.getConfigData().isAdvancedMode()
&& drbdVolumeInfo.getDrbdResourceInfo().isUsedByCRM()) {
return DrbdVolumeInfo.IS_USED_BY_CRM_STRING;
}
if (getBlockDevice().isSyncing()) {
return DrbdVolumeInfo.IS_SYNCING_STRING;
}
return null;
}
@Override
public void action() {
if (this.getText().equals(
Tools.getString("HostBrowser.Drbd.Attach"))) {
attach(testOnly);
} else {
detach(testOnly);
}
}
};
final ClusterBrowser wi = getBrowser().getClusterBrowser();
if (wi != null) {
final ClusterBrowser.DRBDMenuItemCallback attachItemCallback =
wi.new DRBDMenuItemCallback(attachMenu, getHost()) {
@Override
public void action(final Host host) {
if (isDiskless(false)) {
attach(true);
} else {
detach(true);
}
}
};
addMouseOverListener(attachMenu, attachItemCallback);
}
items.add(attachMenu);
/* connect / disconnect */
final MyMenuItem connectMenu =
new MyMenuItem(Tools.getString("HostBrowser.Drbd.Disconnect"),
null,
Tools.getString("HostBrowser.Drbd.Disconnect"),
Tools.getString("HostBrowser.Drbd.Connect"),
null,
Tools.getString("HostBrowser.Drbd.Connect"),
new AccessMode(ConfigData.AccessType.OP, true),
new AccessMode(ConfigData.AccessType.OP, false)) {
private static final long serialVersionUID = 1L;
@Override
public boolean predicate() {
return isConnectedOrWF(testOnly);
}
@Override
public boolean visiblePredicate() {
return getBlockDevice().isDrbd();
}
@Override
public String enablePredicate() {
if (!getBlockDevice().isDrbd()) {
return NO_DRBD_RESOURCE_STRING;
}
if (!Tools.getConfigData().isAdvancedMode()
&& drbdVolumeInfo.getDrbdResourceInfo().isUsedByCRM()) {
return DrbdVolumeInfo.IS_USED_BY_CRM_STRING;
}
if (!getBlockDevice().isSyncing()
|| ((getBlockDevice().isPrimary()
&& getBlockDevice().isSyncSource())
|| (getOtherBlockDevInfo().getBlockDevice().
isPrimary()
&& getBlockDevice().isSyncTarget()))) {
return null;
} else {
return DrbdVolumeInfo.IS_SYNCING_STRING;
}
}
@Override
public void action() {
if (this.getText().equals(
Tools.getString("HostBrowser.Drbd.Connect"))) {
connect(testOnly);
} else {
disconnect(testOnly);
}
}
};
if (wi != null) {
final ClusterBrowser.DRBDMenuItemCallback connectItemCallback =
wi.new DRBDMenuItemCallback(connectMenu,
getHost()) {
@Override
public void action(final Host host) {
if (isConnectedOrWF(false)) {
disconnect(true);
} else {
connect(true);
}
}
};
addMouseOverListener(connectMenu, connectItemCallback);
}
items.add(connectMenu);
/* set primary */
final MyMenuItem setPrimaryItem =
new MyMenuItem(Tools.getString(
"HostBrowser.Drbd.SetPrimaryOtherSecondary"),
null,
Tools.getString(
"HostBrowser.Drbd.SetPrimaryOtherSecondary"),
Tools.getString("HostBrowser.Drbd.SetPrimary"),
null,
Tools.getString("HostBrowser.Drbd.SetPrimary"),
new AccessMode(ConfigData.AccessType.OP, true),
new AccessMode(ConfigData.AccessType.OP, false)) {
private static final long serialVersionUID = 1L;
@Override
public boolean predicate() {
if (!getBlockDevice().isDrbd()) {
return false;
}
return getBlockDevice().isSecondary()
&& getOtherBlockDevInfo().getBlockDevice().isPrimary();
}
@Override
public boolean visiblePredicate() {
return getBlockDevice().isDrbd();
}
@Override
public String enablePredicate() {
if (!getBlockDevice().isDrbd()) {
return NO_DRBD_RESOURCE_STRING;
}
if (!Tools.getConfigData().isAdvancedMode()
&& drbdVolumeInfo.getDrbdResourceInfo().isUsedByCRM()) {
return DrbdVolumeInfo.IS_USED_BY_CRM_STRING;
}
if (!getBlockDevice().isSecondary()) {
return "cannot do that to the primary";
}
return null;
}
@Override
public void action() {
BlockDevInfo oBdi = getOtherBlockDevInfo();
if (oBdi != null && oBdi.getBlockDevice().isPrimary()
&& !"yes".equals(
drbdVolumeInfo.getDrbdResourceInfo().getParamSaved(
ALLOW_TWO_PRIMARIES))) {
oBdi.setSecondary(testOnly);
}
setPrimary(testOnly);
}
};
items.add(setPrimaryItem);
/* set secondary */
final MyMenuItem setSecondaryItem =
new MyMenuItem(Tools.getString("HostBrowser.Drbd.SetSecondary"),
null,
Tools.getString(
"HostBrowser.Drbd.SetSecondary.ToolTip"),
new AccessMode(ConfigData.AccessType.OP, true),
new AccessMode(ConfigData.AccessType.OP, false)) {
private static final long serialVersionUID = 1L;
@Override
public boolean visiblePredicate() {
return getBlockDevice().isDrbd();
}
@Override
public String enablePredicate() {
if (!getBlockDevice().isDrbd()) {
return NO_DRBD_RESOURCE_STRING;
}
if (!Tools.getConfigData().isAdvancedMode()
&& drbdVolumeInfo.getDrbdResourceInfo().isUsedByCRM()) {
return DrbdVolumeInfo.IS_USED_BY_CRM_STRING;
}
if (!getBlockDevice().isPrimary()) {
return "cannot do that to the secondary";
}
return null;
}
@Override
public void action() {
setSecondary(testOnly);
}
};
//enableMenu(setSecondaryItem, false);
items.add(setSecondaryItem);
/* force primary */
final MyMenuItem forcePrimaryItem =
new MyMenuItem(Tools.getString("HostBrowser.Drbd.ForcePrimary"),
null,
Tools.getString("HostBrowser.Drbd.ForcePrimary"),
new AccessMode(ConfigData.AccessType.OP, true),
new AccessMode(ConfigData.AccessType.OP, false)) {
private static final long serialVersionUID = 1L;
@Override
public boolean visiblePredicate() {
return getBlockDevice().isDrbd();
}
@Override
public String enablePredicate() {
if (!getBlockDevice().isDrbd()) {
return NO_DRBD_RESOURCE_STRING;
}
if (!Tools.getConfigData().isAdvancedMode()
&& drbdVolumeInfo.getDrbdResourceInfo().isUsedByCRM()) {
return DrbdVolumeInfo.IS_USED_BY_CRM_STRING;
}
return null;
}
@Override
public void action() {
forcePrimary(testOnly);
}
};
items.add(forcePrimaryItem);
/* invalidate */
final MyMenuItem invalidateItem =
new MyMenuItem(
Tools.getString("HostBrowser.Drbd.Invalidate"),
null,
Tools.getString("HostBrowser.Drbd.Invalidate.ToolTip"),
new AccessMode(ConfigData.AccessType.ADMIN, true),
new AccessMode(ConfigData.AccessType.OP, false)) {
private static final long serialVersionUID = 1L;
@Override
public boolean visiblePredicate() {
return getBlockDevice().isDrbd();
}
@Override
public String enablePredicate() {
if (!getBlockDevice().isDrbd()) {
return NO_DRBD_RESOURCE_STRING;
}
if (!Tools.getConfigData().isAdvancedMode()
&& drbdVolumeInfo.getDrbdResourceInfo().isUsedByCRM()) {
return DrbdVolumeInfo.IS_USED_BY_CRM_STRING;
}
if (getBlockDevice().isSyncing()) {
return DrbdVolumeInfo.IS_SYNCING_STRING;
}
if (getDrbdVolumeInfo().isVerifying()) {
return DrbdVolumeInfo.IS_VERIFYING_STRING;
}
return null;
//return !getBlockDevice().isSyncing()
// && !getDrbdVolumeInfo().isVerifying();
}
@Override
public void action() {
invalidateBD(testOnly);
}
};
items.add(invalidateItem);
/* resume / pause sync */
final MyMenuItem resumeSyncItem =
new MyMenuItem(
Tools.getString("HostBrowser.Drbd.ResumeSync"),
null,
Tools.getString("HostBrowser.Drbd.ResumeSync.ToolTip"),
Tools.getString("HostBrowser.Drbd.PauseSync"),
null,
Tools.getString("HostBrowser.Drbd.PauseSync.ToolTip"),
new AccessMode(ConfigData.AccessType.OP, true),
new AccessMode(ConfigData.AccessType.OP, false)) {
private static final long serialVersionUID = 1L;
@Override
public boolean predicate() {
return getBlockDevice().isSyncing()
&& getBlockDevice().isPausedSync();
}
@Override
public boolean visiblePredicate() {
return getBlockDevice().isDrbd();
}
@Override
public String enablePredicate() {
if (!getBlockDevice().isDrbd()) {
return NO_DRBD_RESOURCE_STRING;
}
if (!Tools.getConfigData().isAdvancedMode()
&& drbdVolumeInfo.getDrbdResourceInfo().isUsedByCRM()) {
return DrbdVolumeInfo.IS_USED_BY_CRM_STRING;
}
if (!getBlockDevice().isSyncing()) {
return "it is not being synced";
}
return null;
}
@Override
public void action() {
if (this.getText().equals(
Tools.getString("HostBrowser.Drbd.ResumeSync"))) {
resumeSync(testOnly);
} else {
pauseSync(testOnly);
}
}
};
items.add(resumeSyncItem);
/* resize */
final MyMenuItem resizeItem =
new MyMenuItem(Tools.getString("HostBrowser.Drbd.Resize"),
null,
Tools.getString("HostBrowser.Drbd.Resize.ToolTip"),
new AccessMode(ConfigData.AccessType.ADMIN, true),
new AccessMode(ConfigData.AccessType.OP, false)) {
private static final long serialVersionUID = 1L;
@Override
public boolean visiblePredicate() {
return getBlockDevice().isDrbd();
}
@Override
public String enablePredicate() {
if (!getBlockDevice().isDrbd()) {
return NO_DRBD_RESOURCE_STRING;
}
if (!Tools.getConfigData().isAdvancedMode()
&& drbdVolumeInfo.getDrbdResourceInfo().isUsedByCRM()) {
return DrbdVolumeInfo.IS_USED_BY_CRM_STRING;
}
if (getBlockDevice().isSyncing()) {
return DrbdVolumeInfo.IS_SYNCING_STRING;
}
return null;
}
@Override
public void action() {
resizeDrbd(testOnly);
}
};
items.add(resizeItem);
/* discard my data */
final MyMenuItem discardDataItem =
new MyMenuItem(Tools.getString("HostBrowser.Drbd.DiscardData"),
null,
Tools.getString(
"HostBrowser.Drbd.DiscardData.ToolTip"),
new AccessMode(ConfigData.AccessType.ADMIN, true),
new AccessMode(ConfigData.AccessType.OP, false)) {
private static final long serialVersionUID = 1L;
@Override
public boolean visiblePredicate() {
return getBlockDevice().isDrbd();
}
@Override
public String enablePredicate() {
if (!getBlockDevice().isDrbd()) {
return NO_DRBD_RESOURCE_STRING;
}
if (!Tools.getConfigData().isAdvancedMode()
&& drbdVolumeInfo.getDrbdResourceInfo().isUsedByCRM()) {
return DrbdVolumeInfo.IS_USED_BY_CRM_STRING;
}
if (getBlockDevice().isSyncing()) {
return DrbdVolumeInfo.IS_SYNCING_STRING;
}
//if (isConnected(testOnly)) { // ? TODO: check this
// return "is connected";
//}
if (getBlockDevice().isPrimary()) {
return "cannot do that to the primary";
}
return null;
//return !getBlockDevice().isSyncing()
// && !isConnected(testOnly)
// && !getBlockDevice().isPrimary();
}
@Override
public void action() {
discardData(testOnly);
}
};
items.add(discardDataItem);
/* proxy up/down */
final MyMenuItem proxyItem =
new MyMenuItem(Tools.getString("BlockDevInfo.Drbd.ProxyDown"),
null,
getMenuToolTip("DRBD.proxyDown"),
Tools.getString("BlockDevInfo.Drbd.ProxyUp"),
null,
getMenuToolTip("DRBD.proxyUp"),
new AccessMode(ConfigData.AccessType.ADMIN,
!AccessMode.ADVANCED),
new AccessMode(ConfigData.AccessType.OP,
!AccessMode.ADVANCED)) {
private static final long serialVersionUID = 1L;
@Override
public boolean visiblePredicate() {
if (!getBlockDevice().isDrbd()) {
return false;
}
return getDrbdVolumeInfo().getDrbdResourceInfo().isProxy(
getHost());
}
@Override
public String enablePredicate() {
if (!getBlockDevice().isDrbd()) {
return NO_DRBD_RESOURCE_STRING;
}
final DrbdResourceInfo dri =
drbdVolumeInfo.getDrbdResourceInfo();
final Host pHost =
dri.getProxyHost(getHost(), !DrbdResourceInfo.WIZARD);
+ if (pHost == null) {
+ return "not a proxy";
+ }
if (!pHost.isConnected()) {
return Host.NOT_CONNECTED_STRING;
}
if (!pHost.isDrbdProxyRunning()) {
return "proxy daemon is not running";
}
return null;
}
@Override
public boolean predicate() {
if (!getBlockDevice().isDrbd()) {
return false;
}
final DrbdResourceInfo dri =
drbdVolumeInfo.getDrbdResourceInfo();
final Host pHost =
dri.getProxyHost(getHost(), !DrbdResourceInfo.WIZARD);
+ if (pHost == null) {
+ return false;
+ }
if (getBlockDevice().isDrbd()) {
return pHost.isDrbdProxyUp(
drbdVolumeInfo.getDrbdResourceInfo().getName());
} else {
return true;
}
}
@Override
public void action() {
final DrbdResourceInfo dri =
drbdVolumeInfo.getDrbdResourceInfo();
final Host pHost =
dri.getProxyHost(getHost(), !DrbdResourceInfo.WIZARD);
if (pHost.isDrbdProxyUp(
drbdVolumeInfo.getDrbdResourceInfo().getName())) {
DRBD.proxyDown(
pHost,
drbdVolumeInfo.getDrbdResourceInfo().getName(),
drbdVolumeInfo.getName(),
testOnly);
} else {
DRBD.proxyUp(
pHost,
drbdVolumeInfo.getDrbdResourceInfo().getName(),
drbdVolumeInfo.getName(),
testOnly);
}
getBrowser().getClusterBrowser().updateProxyHWInfo(pHost);
}
};
items.add(proxyItem);
/* view log */
final MyMenuItem viewDrbdLogItem =
new MyMenuItem(Tools.getString("HostBrowser.Drbd.ViewDrbdLog"),
LOGFILE_ICON,
null,
new AccessMode(ConfigData.AccessType.RO, false),
new AccessMode(ConfigData.AccessType.RO, false)) {
private static final long serialVersionUID = 1L;
@Override
public boolean visiblePredicate() {
return getBlockDevice().isDrbd();
}
@Override
public String enablePredicate() {
return null;
}
@Override
public void action() {
String device = getDrbdVolumeInfo().getDevice();
DrbdLog l = new DrbdLog(getHost(), device);
l.showDialog();
}
};
items.add(viewDrbdLogItem);
return items;
}
/** Returns how much of the block device is used. */
@Override
public int getUsed() {
final DrbdVolumeInfo dvi = drbdVolumeInfo;
if (dvi != null) {
return dvi.getUsed();
}
return getBlockDevice().getUsed();
}
/** Returns text that appears above the icon. */
public String getIconTextForGraph(final boolean testOnly) {
if (!getHost().isConnected()) {
return Tools.getString("HostBrowser.Drbd.NoInfoAvailable");
}
if (getBlockDevice().isDrbd()) {
return getBlockDevice().getNodeState();
}
return null;
}
public String getMainTextForGraph() {
if (!isLVM()) {
final String vg = getBlockDevice().getVolumeGroupOnPhysicalVolume();
if (vg != null && !"".equals(vg)) {
return "VG " + vg;
}
}
return getName();
}
/** Returns text that appears in the corner of the drbd graph. */
public Subtext getRightCornerTextForDrbdGraph(final boolean testOnly) {
String vg = null;
if (isLVM()) {
vg = getBlockDevice().getVolumeGroup();
} else {
vg = getBlockDevice().getVolumeGroupOnPhysicalVolume();
}
if (getBlockDevice().isDrbdMetaDisk()) {
return METADISK_SUBTEXT;
} else if (getBlockDevice().isSwap()) {
return SWAP_SUBTEXT;
} else if (getBlockDevice().getMountedOn() != null) {
return MOUNTED_SUBTEXT;
} else if (getBlockDevice().isDrbd()) {
String s = getBlockDevice().getName();
// TODO: cache that
if (s.length() > MAX_RIGHT_CORNER_STRING_LENGTH) {
s = "..." + s.substring(
s.length()
- MAX_RIGHT_CORNER_STRING_LENGTH + 3,
s.length());
}
if (getBlockDevice().isDrbdPhysicalVolume()) {
final String drbdVG = getBlockDevice().getDrbdBlockDevice()
.getVolumeGroupOnPhysicalVolume();
if (drbdVG != null && !"".equals(drbdVG)) {
s = s + " VG:" + drbdVG;
} else {
s = s + " PV";
}
}
return new Subtext(s, Color.BLUE, Color.BLACK);
} else if (vg != null && !"".equals(vg)) {
if (isLVM()) {
return new Subtext("LV in " + vg, Color.BLUE, Color.GREEN);
} else {
return new Subtext(getName(), Color.BLUE, Color.GREEN);
}
} else if (getBlockDevice().isPhysicalVolume()) {
return PHYSICAL_VOLUME_SUBTEXT;
}
return null;
}
/** Returns whether this device is connected via drbd. */
public boolean isConnected(final boolean testOnly) {
final DRBDtestData dtd = getDRBDtestData();
if (testOnly && dtd != null) {
return isConnectedTest(dtd) && !isWFConnection(testOnly);
} else {
return getBlockDevice().isConnected();
}
}
/** Returns whether this device is connected or wait-for-c via drbd. */
boolean isConnectedOrWF(final boolean testOnly) {
final DRBDtestData dtd = getDRBDtestData();
if (testOnly && dtd != null) {
return isConnectedTest(dtd);
} else {
return getBlockDevice().isConnectedOrWF();
}
}
/** Returns whether this device is in wait-for-connection state. */
public boolean isWFConnection(final boolean testOnly) {
final DRBDtestData dtd = getDRBDtestData();
if (testOnly && dtd != null) {
return isConnectedOrWF(testOnly)
&& isConnectedTest(dtd)
&& !getOtherBlockDevInfo().isConnectedTest(dtd);
} else {
return getBlockDevice().isWFConnection();
}
}
/** Returns whether this device will be disconnected. */
boolean isConnectedTest(final DRBDtestData dtd) {
return dtd.isConnected(getHost(),
drbdVolumeInfo.getDrbdResourceInfo().getName())
|| (!dtd.isDisconnected(
getHost(),
drbdVolumeInfo.getDrbdResourceInfo().getName())
&& getBlockDevice().isConnectedOrWF());
}
/** Returns whether this device is diskless. */
public boolean isDiskless(final boolean testOnly) {
final DRBDtestData dtd = getDRBDtestData();
final DrbdVolumeInfo dvi = drbdVolumeInfo;
if (testOnly && dtd != null && dvi != null) {
return dtd.isDiskless(getHost(), drbdVolumeInfo.getDevice())
|| (!dtd.isAttached(getHost(),
drbdVolumeInfo.getDevice())
&& getBlockDevice().isDiskless());
} else {
return getBlockDevice().isDiskless();
}
}
/** Returns drbd test data. */
DRBDtestData getDRBDtestData() {
final ClusterBrowser b = getBrowser().getClusterBrowser();
if (b == null) {
return null;
}
return b.getDRBDtestData();
}
/** Sets drbd test data. */
void setDRBDtestData(final DRBDtestData drbdtestData) {
final ClusterBrowser b = getBrowser().getClusterBrowser();
if (b == null) {
return;
}
b.setDRBDtestData(drbdtestData);
}
/** Compares ignoring case and using drbd device names if available. */
@Override
public int compareTo(final Info o) {
String name;
String oName;
int volume = 0;
int oVolume = 0;
final DrbdVolumeInfo dvi = getDrbdVolumeInfo();
if (getBlockDevice().isDrbd() && dvi != null) {
name = dvi.getDrbdResourceInfo().getName();
final String v = dvi.getName();
if (Tools.isNumber(v)) {
volume = Integer.parseInt(v);
}
} else {
name = getName();
}
final BlockDevInfo obdi = (BlockDevInfo) o;
final DrbdVolumeInfo odvi = obdi.getDrbdVolumeInfo();
if (obdi.getBlockDevice().isDrbd() && odvi != null) {
oName = odvi.getDrbdResourceInfo().getName();
final String v = odvi.getName();
if (Tools.isNumber(v)) {
oVolume = Integer.parseInt(v);
}
} else {
oName = ((BlockDevInfo) o).getName();
}
/* drbds up */
if (getBlockDevice().isDrbd()
&& !obdi.getBlockDevice().isDrbd()) {
return -1;
}
if (!getBlockDevice().isDrbd()
&& obdi.getBlockDevice().isDrbd()) {
return 1;
}
/* volume groups down */
if (getBlockDevice().isVolumeGroupOnPhysicalVolume()
&& !obdi.getBlockDevice().isVolumeGroupOnPhysicalVolume()) {
return 1;
}
if (!getBlockDevice().isVolumeGroupOnPhysicalVolume()
&& obdi.getBlockDevice().isVolumeGroupOnPhysicalVolume()) {
return -1;
}
final int ret = name.compareToIgnoreCase(oName);
if (ret == 0) {
return volume - oVolume;
}
return ret;
}
    /** Sets stored parameters from the drbd configuration for the given
     * resource, updating the resource values and their widgets when they
     * differ from the saved ones. */
    public void setParameters(final String resName) {
        getBlockDevice().setNew(false);
        final ClusterBrowser clusterBrowser = getBrowser().getClusterBrowser();
        if (clusterBrowser == null) {
            return;
        }
        final DrbdVolumeInfo dvi = drbdVolumeInfo;
        if (dvi == null) {
            return;
        }
        final DrbdXML dxml = clusterBrowser.getDrbdXML();
        final String hostName = getHost().getName();
        final DrbdGraph drbdGraph = getBrowser().getDrbdGraph();
        /* NOTE(review): 'value' is declared outside the loop and is only
           reassigned for the meta-disk params, so for any other param it
           carries over the previous iteration's value — looks unintended;
           TODO confirm. */
        String value = null;
        final String volumeNr = dvi.getName();
        for (final String param : getParametersFromXML()) {
            if (DRBD_MD_PARAM.equals(param)) {
                value = dxml.getMetaDisk(hostName, resName, volumeNr);
                /* external meta disk: link to its BlockDevInfo if found */
                if (!"internal".equals(value)) {
                    final BlockDevInfo mdI =
                                   drbdGraph.findBlockDevInfo(hostName, value);
                    if (mdI != null) {
                        getBlockDevice().setMetaDisk(mdI.getBlockDevice());
                    }
                }
            } else if (DRBD_MD_INDEX_PARAM.equals(param)) {
                value = dxml.getMetaDiskIndex(hostName, resName, volumeNr);
            }
            /* fall back to the default, then to the empty string */
            final String defaultValue = getParamDefault(param);
            if (value == null) {
                value = defaultValue;
            }
            if (value == null) {
                value = "";
            }
            final String oldValue = getParamSaved(param);
            final Widget wi = getWidget(param, null);
            /* write through only when the value actually changed */
            if (!Tools.areEqual(value, oldValue)) {
                getResource().setValue(param, value);
                if (wi != null) {
                    wi.setValue(value);
                }
            }
        }
    }
    /**
     * Returns whether the specified parameter or any of the parameters
     * have changed. If param is not null, only that parameter is checked,
     * otherwise all parameters are checked.
     */
    @Override
    public boolean checkResourceFieldsChanged(final String param,
                                              final String[] params) {
        return checkResourceFieldsChanged(param, params, false, false, false);
    }
    /**
     * Returns whether the specified parameter or any of the parameters
     * have changed. If param is not null, only that parameter is checked,
     * otherwise all parameters are checked. The from* flags tell which
     * level of the drbd info hierarchy initiated the check; when none did,
     * the volume's apply buttons are refreshed here — presumably to avoid
     * refreshing them more than once per chain (TODO confirm).
     */
    boolean checkResourceFieldsChanged(
                                   final String param,
                                   final String[] params,
                                   final boolean fromDrbdInfo,
                                   final boolean fromDrbdResourceInfo,
                                   final boolean fromDrbdVolumeInfo) {
        final DrbdVolumeInfo dvi = getDrbdVolumeInfo();
        if (dvi != null
            && !fromDrbdVolumeInfo
            && !fromDrbdResourceInfo
            && !fromDrbdInfo) {
            dvi.setApplyButtons(null, dvi.getParametersFromXML());
        }
        return super.checkResourceFieldsChanged(param, params);
    }
    /**
     * Returns whether all the parameters are correct. If param is null,
     * all parameters are checked; otherwise only param is checked and the
     * other parameters are checked only against the cache. This is useful
     * when only one value changed and we don't want to re-check everything.
     */
    @Override
    public boolean checkResourceFieldsCorrect(final String param,
                                              final String[] params) {
        return checkResourceFieldsCorrect(param, params, false, false, false);
    }
/**
* Returns whether all the parameters are correct. If param is null,
* all paremeters will be checked, otherwise only the param, but other
* parameters will be checked only in the cache. This is good if only
* one value is changed and we don't want to check everything.
*/
boolean checkResourceFieldsCorrect(final String param,
final String[] params,
final boolean fromDrbdInfo,
final boolean fromDrbdResourceInfo,
final boolean fromDrbdVolumeInfo) {
boolean correct = true;
final DrbdXML dxml = getBrowser().getClusterBrowser().getDrbdXML();
if (dxml != null && dxml.isDrbdDisabled()) {
correct = false;
}
return super.checkResourceFieldsCorrect(param, params) && correct;
}
/** Returns whether this block device is a volume group in LVM. */
public boolean isLVM() {
return getBlockDevice().getVolumeGroup() != null;
}
/** Returns how much is free space in a volume group. */
public Long getFreeInVolumeGroup() {
return getHost().getFreeInVolumeGroup(
getBlockDevice().getVolumeGroup());
}
/** Returns true if this is the first volume in the resource. Returns true
* if this is not a DRBD resource. */
public boolean isFirstDrbdVolume() {
if (!getBlockDevice().isDrbd()) {
return true;
}
final Set<DrbdVolumeInfo> drbdVolumes =
getDrbdVolumeInfo().getDrbdResourceInfo().getDrbdVolumes();
if (drbdVolumes == null || drbdVolumes.isEmpty()) {
return true;
}
return drbdVolumes.iterator().next() == getDrbdVolumeInfo();
}
/** Return whether two primaries are allowed. */
boolean allowTwoPrimaries() {
final DrbdResourceInfo dri = drbdVolumeInfo.getDrbdResourceInfo();
return "yes".equals(dri.getParamSaved(ALLOW_TWO_PRIMARIES));
}
/**
* Proxy status for graph, null if there's no proxy configured for the
* resource.
*/
public String getProxyStateForGraph(final boolean testOnly) {
final DrbdResourceInfo dri = drbdVolumeInfo.getDrbdResourceInfo();
final Host pHost =
dri.getProxyHost(getHost(), !DrbdResourceInfo.WIZARD);
if (dri.isProxy(getHost())) {
if (pHost.isConnected()) {
if (pHost.isDrbdProxyUp(dri.getName())) {
return PROXY_UP;
} else {
return PROXY_DOWN;
}
} else {
if (drbdVolumeInfo.isConnected(testOnly)) {
return PROXY_UP;
} else {
return pHost.getName();
}
}
}
return null;
}
/** Tool tip for menu items. */
private String getMenuToolTip(final String cmd) {
if (getBlockDevice().isDrbd()) {
return DRBD.getDistCommand(
cmd,
getHost(),
drbdVolumeInfo.getDrbdResourceInfo().getName(),
drbdVolumeInfo.getName()).replaceAll("@.*?@", "");
} else {
return null;
}
}
}
| false | true | public List<UpdatableItem> createPopup() {
final List<UpdatableItem> items = new ArrayList<UpdatableItem>();
final BlockDevInfo thisClass = this;
final boolean testOnly = false;
final MyMenu repMenuItem = new MyMenu(
Tools.getString("HostBrowser.Drbd.AddDrbdResource"),
new AccessMode(ConfigData.AccessType.ADMIN, false),
new AccessMode(ConfigData.AccessType.OP, false)) {
private static final long serialVersionUID = 1L;
@Override
public String enablePredicate() {
final DrbdXML dxml =
getBrowser().getClusterBrowser().getDrbdXML();
if (drbdVolumeInfo != null) {
return "it is already a drbd resouce";
} else if (!getHost().isConnected()) {
return Host.NOT_CONNECTED_STRING;
} else if (!getHost().isDrbdLoaded()) {
return "drbd is not loaded";
} else if (getBlockDevice().isMounted()) {
return "is mounted";
} else if (getBlockDevice().isVolumeGroupOnPhysicalVolume()) {
return "is volume group";
} else if (!getBlockDevice().isAvailable()) {
return "not available";
} else if (dxml.isDrbdDisabled()) {
return "disabled because of config";
}
return null;
}
@Override
public void update() {
super.update();
Cluster cluster = getHost().getCluster();
Host[] otherHosts = cluster.getHostsArray();
final List<MyMenu> hostMenus = new ArrayList<MyMenu>();
for (final Host oHost : otherHosts) {
if (oHost == getHost()) {
continue;
}
final MyMenu hostMenu = new MyMenu(oHost.getName(),
new AccessMode(
ConfigData.AccessType.ADMIN,
false),
new AccessMode(
ConfigData.AccessType.OP,
false)) {
private static final long serialVersionUID = 1L;
@Override
public String enablePredicate() {
final DrbdXML dxml =
getBrowser().getClusterBrowser().getDrbdXML();
if (!oHost.isConnected()) {
return Host.NOT_CONNECTED_STRING;
} else if (!oHost.isDrbdLoaded()) {
return "drbd is not loaded";
} else {
return null;
}
//return oHost.isConnected()
// && oHost.isDrbdLoaded();
}
@Override
public void update() {
super.update();
Tools.invokeAndWait(new Runnable() {
@Override
public void run() {
removeAll();
}
});
Set<BlockDevInfo> blockDevInfos =
oHost.getBrowser().getBlockDevInfosInSwing();
List<BlockDevInfo> blockDevInfosS =
new ArrayList<BlockDevInfo>();
for (final BlockDevInfo oBdi : blockDevInfos) {
if (oBdi.getName().equals(
getBlockDevice().getName())) {
blockDevInfosS.add(0, oBdi);
} else {
blockDevInfosS.add(oBdi);
}
}
for (final BlockDevInfo oBdi : blockDevInfosS) {
if (oBdi.getDrbdVolumeInfo() == null
&& oBdi.getBlockDevice().isAvailable()) {
add(addDrbdResourceMenuItem(oBdi,
testOnly));
}
if (oBdi.getName().equals(
getBlockDevice().getName())) {
addSeparator();
}
}
}
};
hostMenu.update();
hostMenus.add(hostMenu);
}
Tools.invokeAndWait(new Runnable() {
@Override
public void run() {
removeAll();
for (final MyMenu hostMenu : hostMenus) {
add(hostMenu);
}
}
});
}
};
items.add(repMenuItem);
/* PV Create */
items.add(getPVCreateItem());
/* PV Remove */
items.add(getPVRemoveItem());
/* VG Create */
items.add(getVGCreateItem());
/* VG Remove */
items.add(getVGRemoveItem());
/* LV Create */
items.add(getLVCreateItem());
/* LV Remove */
items.add(getLVRemoveItem());
/* LV Resize */
items.add(getLVResizeItem());
/* LV Snapshot */
items.add(getLVSnapshotItem());
/* attach / detach */
final MyMenuItem attachMenu =
new MyMenuItem(Tools.getString("HostBrowser.Drbd.Detach"),
NO_HARDDISK_ICON_LARGE,
Tools.getString("HostBrowser.Drbd.Detach.ToolTip"),
Tools.getString("HostBrowser.Drbd.Attach"),
HARDDISK_DRBD_ICON_LARGE,
Tools.getString("HostBrowser.Drbd.Attach.ToolTip"),
new AccessMode(ConfigData.AccessType.OP, true),
new AccessMode(ConfigData.AccessType.OP, false)) {
private static final long serialVersionUID = 1L;
@Override
public boolean predicate() {
return !getBlockDevice().isDrbd()
|| getBlockDevice().isAttached();
}
@Override
public boolean visiblePredicate() {
return getBlockDevice().isDrbd();
}
@Override
public String enablePredicate() {
if (!getBlockDevice().isDrbd()) {
return NO_DRBD_RESOURCE_STRING;
}
if (!Tools.getConfigData().isAdvancedMode()
&& drbdVolumeInfo.getDrbdResourceInfo().isUsedByCRM()) {
return DrbdVolumeInfo.IS_USED_BY_CRM_STRING;
}
if (getBlockDevice().isSyncing()) {
return DrbdVolumeInfo.IS_SYNCING_STRING;
}
return null;
}
@Override
public void action() {
if (this.getText().equals(
Tools.getString("HostBrowser.Drbd.Attach"))) {
attach(testOnly);
} else {
detach(testOnly);
}
}
};
final ClusterBrowser wi = getBrowser().getClusterBrowser();
if (wi != null) {
final ClusterBrowser.DRBDMenuItemCallback attachItemCallback =
wi.new DRBDMenuItemCallback(attachMenu, getHost()) {
@Override
public void action(final Host host) {
if (isDiskless(false)) {
attach(true);
} else {
detach(true);
}
}
};
addMouseOverListener(attachMenu, attachItemCallback);
}
items.add(attachMenu);
/* connect / disconnect */
final MyMenuItem connectMenu =
new MyMenuItem(Tools.getString("HostBrowser.Drbd.Disconnect"),
null,
Tools.getString("HostBrowser.Drbd.Disconnect"),
Tools.getString("HostBrowser.Drbd.Connect"),
null,
Tools.getString("HostBrowser.Drbd.Connect"),
new AccessMode(ConfigData.AccessType.OP, true),
new AccessMode(ConfigData.AccessType.OP, false)) {
private static final long serialVersionUID = 1L;
@Override
public boolean predicate() {
return isConnectedOrWF(testOnly);
}
@Override
public boolean visiblePredicate() {
return getBlockDevice().isDrbd();
}
@Override
public String enablePredicate() {
if (!getBlockDevice().isDrbd()) {
return NO_DRBD_RESOURCE_STRING;
}
if (!Tools.getConfigData().isAdvancedMode()
&& drbdVolumeInfo.getDrbdResourceInfo().isUsedByCRM()) {
return DrbdVolumeInfo.IS_USED_BY_CRM_STRING;
}
if (!getBlockDevice().isSyncing()
|| ((getBlockDevice().isPrimary()
&& getBlockDevice().isSyncSource())
|| (getOtherBlockDevInfo().getBlockDevice().
isPrimary()
&& getBlockDevice().isSyncTarget()))) {
return null;
} else {
return DrbdVolumeInfo.IS_SYNCING_STRING;
}
}
@Override
public void action() {
if (this.getText().equals(
Tools.getString("HostBrowser.Drbd.Connect"))) {
connect(testOnly);
} else {
disconnect(testOnly);
}
}
};
if (wi != null) {
final ClusterBrowser.DRBDMenuItemCallback connectItemCallback =
wi.new DRBDMenuItemCallback(connectMenu,
getHost()) {
@Override
public void action(final Host host) {
if (isConnectedOrWF(false)) {
disconnect(true);
} else {
connect(true);
}
}
};
addMouseOverListener(connectMenu, connectItemCallback);
}
items.add(connectMenu);
/* set primary */
final MyMenuItem setPrimaryItem =
new MyMenuItem(Tools.getString(
"HostBrowser.Drbd.SetPrimaryOtherSecondary"),
null,
Tools.getString(
"HostBrowser.Drbd.SetPrimaryOtherSecondary"),
Tools.getString("HostBrowser.Drbd.SetPrimary"),
null,
Tools.getString("HostBrowser.Drbd.SetPrimary"),
new AccessMode(ConfigData.AccessType.OP, true),
new AccessMode(ConfigData.AccessType.OP, false)) {
private static final long serialVersionUID = 1L;
@Override
public boolean predicate() {
if (!getBlockDevice().isDrbd()) {
return false;
}
return getBlockDevice().isSecondary()
&& getOtherBlockDevInfo().getBlockDevice().isPrimary();
}
@Override
public boolean visiblePredicate() {
return getBlockDevice().isDrbd();
}
@Override
public String enablePredicate() {
if (!getBlockDevice().isDrbd()) {
return NO_DRBD_RESOURCE_STRING;
}
if (!Tools.getConfigData().isAdvancedMode()
&& drbdVolumeInfo.getDrbdResourceInfo().isUsedByCRM()) {
return DrbdVolumeInfo.IS_USED_BY_CRM_STRING;
}
if (!getBlockDevice().isSecondary()) {
return "cannot do that to the primary";
}
return null;
}
@Override
public void action() {
BlockDevInfo oBdi = getOtherBlockDevInfo();
if (oBdi != null && oBdi.getBlockDevice().isPrimary()
&& !"yes".equals(
drbdVolumeInfo.getDrbdResourceInfo().getParamSaved(
ALLOW_TWO_PRIMARIES))) {
oBdi.setSecondary(testOnly);
}
setPrimary(testOnly);
}
};
items.add(setPrimaryItem);
/* set secondary */
final MyMenuItem setSecondaryItem =
new MyMenuItem(Tools.getString("HostBrowser.Drbd.SetSecondary"),
null,
Tools.getString(
"HostBrowser.Drbd.SetSecondary.ToolTip"),
new AccessMode(ConfigData.AccessType.OP, true),
new AccessMode(ConfigData.AccessType.OP, false)) {
private static final long serialVersionUID = 1L;
@Override
public boolean visiblePredicate() {
return getBlockDevice().isDrbd();
}
@Override
public String enablePredicate() {
if (!getBlockDevice().isDrbd()) {
return NO_DRBD_RESOURCE_STRING;
}
if (!Tools.getConfigData().isAdvancedMode()
&& drbdVolumeInfo.getDrbdResourceInfo().isUsedByCRM()) {
return DrbdVolumeInfo.IS_USED_BY_CRM_STRING;
}
if (!getBlockDevice().isPrimary()) {
return "cannot do that to the secondary";
}
return null;
}
@Override
public void action() {
setSecondary(testOnly);
}
};
//enableMenu(setSecondaryItem, false);
items.add(setSecondaryItem);
/* force primary */
final MyMenuItem forcePrimaryItem =
new MyMenuItem(Tools.getString("HostBrowser.Drbd.ForcePrimary"),
null,
Tools.getString("HostBrowser.Drbd.ForcePrimary"),
new AccessMode(ConfigData.AccessType.OP, true),
new AccessMode(ConfigData.AccessType.OP, false)) {
private static final long serialVersionUID = 1L;
@Override
public boolean visiblePredicate() {
return getBlockDevice().isDrbd();
}
@Override
public String enablePredicate() {
if (!getBlockDevice().isDrbd()) {
return NO_DRBD_RESOURCE_STRING;
}
if (!Tools.getConfigData().isAdvancedMode()
&& drbdVolumeInfo.getDrbdResourceInfo().isUsedByCRM()) {
return DrbdVolumeInfo.IS_USED_BY_CRM_STRING;
}
return null;
}
@Override
public void action() {
forcePrimary(testOnly);
}
};
items.add(forcePrimaryItem);
/* invalidate */
final MyMenuItem invalidateItem =
new MyMenuItem(
Tools.getString("HostBrowser.Drbd.Invalidate"),
null,
Tools.getString("HostBrowser.Drbd.Invalidate.ToolTip"),
new AccessMode(ConfigData.AccessType.ADMIN, true),
new AccessMode(ConfigData.AccessType.OP, false)) {
private static final long serialVersionUID = 1L;
@Override
public boolean visiblePredicate() {
return getBlockDevice().isDrbd();
}
@Override
public String enablePredicate() {
if (!getBlockDevice().isDrbd()) {
return NO_DRBD_RESOURCE_STRING;
}
if (!Tools.getConfigData().isAdvancedMode()
&& drbdVolumeInfo.getDrbdResourceInfo().isUsedByCRM()) {
return DrbdVolumeInfo.IS_USED_BY_CRM_STRING;
}
if (getBlockDevice().isSyncing()) {
return DrbdVolumeInfo.IS_SYNCING_STRING;
}
if (getDrbdVolumeInfo().isVerifying()) {
return DrbdVolumeInfo.IS_VERIFYING_STRING;
}
return null;
//return !getBlockDevice().isSyncing()
// && !getDrbdVolumeInfo().isVerifying();
}
@Override
public void action() {
invalidateBD(testOnly);
}
};
items.add(invalidateItem);
/* resume / pause sync */
final MyMenuItem resumeSyncItem =
new MyMenuItem(
Tools.getString("HostBrowser.Drbd.ResumeSync"),
null,
Tools.getString("HostBrowser.Drbd.ResumeSync.ToolTip"),
Tools.getString("HostBrowser.Drbd.PauseSync"),
null,
Tools.getString("HostBrowser.Drbd.PauseSync.ToolTip"),
new AccessMode(ConfigData.AccessType.OP, true),
new AccessMode(ConfigData.AccessType.OP, false)) {
private static final long serialVersionUID = 1L;
@Override
public boolean predicate() {
return getBlockDevice().isSyncing()
&& getBlockDevice().isPausedSync();
}
@Override
public boolean visiblePredicate() {
return getBlockDevice().isDrbd();
}
@Override
public String enablePredicate() {
if (!getBlockDevice().isDrbd()) {
return NO_DRBD_RESOURCE_STRING;
}
if (!Tools.getConfigData().isAdvancedMode()
&& drbdVolumeInfo.getDrbdResourceInfo().isUsedByCRM()) {
return DrbdVolumeInfo.IS_USED_BY_CRM_STRING;
}
if (!getBlockDevice().isSyncing()) {
return "it is not being synced";
}
return null;
}
@Override
public void action() {
if (this.getText().equals(
Tools.getString("HostBrowser.Drbd.ResumeSync"))) {
resumeSync(testOnly);
} else {
pauseSync(testOnly);
}
}
};
items.add(resumeSyncItem);
/* resize */
final MyMenuItem resizeItem =
new MyMenuItem(Tools.getString("HostBrowser.Drbd.Resize"),
null,
Tools.getString("HostBrowser.Drbd.Resize.ToolTip"),
new AccessMode(ConfigData.AccessType.ADMIN, true),
new AccessMode(ConfigData.AccessType.OP, false)) {
private static final long serialVersionUID = 1L;
@Override
public boolean visiblePredicate() {
return getBlockDevice().isDrbd();
}
@Override
public String enablePredicate() {
if (!getBlockDevice().isDrbd()) {
return NO_DRBD_RESOURCE_STRING;
}
if (!Tools.getConfigData().isAdvancedMode()
&& drbdVolumeInfo.getDrbdResourceInfo().isUsedByCRM()) {
return DrbdVolumeInfo.IS_USED_BY_CRM_STRING;
}
if (getBlockDevice().isSyncing()) {
return DrbdVolumeInfo.IS_SYNCING_STRING;
}
return null;
}
@Override
public void action() {
resizeDrbd(testOnly);
}
};
items.add(resizeItem);
/* discard my data */
final MyMenuItem discardDataItem =
new MyMenuItem(Tools.getString("HostBrowser.Drbd.DiscardData"),
null,
Tools.getString(
"HostBrowser.Drbd.DiscardData.ToolTip"),
new AccessMode(ConfigData.AccessType.ADMIN, true),
new AccessMode(ConfigData.AccessType.OP, false)) {
private static final long serialVersionUID = 1L;
@Override
public boolean visiblePredicate() {
return getBlockDevice().isDrbd();
}
@Override
public String enablePredicate() {
if (!getBlockDevice().isDrbd()) {
return NO_DRBD_RESOURCE_STRING;
}
if (!Tools.getConfigData().isAdvancedMode()
&& drbdVolumeInfo.getDrbdResourceInfo().isUsedByCRM()) {
return DrbdVolumeInfo.IS_USED_BY_CRM_STRING;
}
if (getBlockDevice().isSyncing()) {
return DrbdVolumeInfo.IS_SYNCING_STRING;
}
//if (isConnected(testOnly)) { // ? TODO: check this
// return "is connected";
//}
if (getBlockDevice().isPrimary()) {
return "cannot do that to the primary";
}
return null;
//return !getBlockDevice().isSyncing()
// && !isConnected(testOnly)
// && !getBlockDevice().isPrimary();
}
@Override
public void action() {
discardData(testOnly);
}
};
items.add(discardDataItem);
/* proxy up/down */
final MyMenuItem proxyItem =
new MyMenuItem(Tools.getString("BlockDevInfo.Drbd.ProxyDown"),
null,
getMenuToolTip("DRBD.proxyDown"),
Tools.getString("BlockDevInfo.Drbd.ProxyUp"),
null,
getMenuToolTip("DRBD.proxyUp"),
new AccessMode(ConfigData.AccessType.ADMIN,
!AccessMode.ADVANCED),
new AccessMode(ConfigData.AccessType.OP,
!AccessMode.ADVANCED)) {
private static final long serialVersionUID = 1L;
@Override
public boolean visiblePredicate() {
if (!getBlockDevice().isDrbd()) {
return false;
}
return getDrbdVolumeInfo().getDrbdResourceInfo().isProxy(
getHost());
}
@Override
public String enablePredicate() {
if (!getBlockDevice().isDrbd()) {
return NO_DRBD_RESOURCE_STRING;
}
final DrbdResourceInfo dri =
drbdVolumeInfo.getDrbdResourceInfo();
final Host pHost =
dri.getProxyHost(getHost(), !DrbdResourceInfo.WIZARD);
if (!pHost.isConnected()) {
return Host.NOT_CONNECTED_STRING;
}
if (!pHost.isDrbdProxyRunning()) {
return "proxy daemon is not running";
}
return null;
}
@Override
public boolean predicate() {
if (!getBlockDevice().isDrbd()) {
return false;
}
final DrbdResourceInfo dri =
drbdVolumeInfo.getDrbdResourceInfo();
final Host pHost =
dri.getProxyHost(getHost(), !DrbdResourceInfo.WIZARD);
if (getBlockDevice().isDrbd()) {
return pHost.isDrbdProxyUp(
drbdVolumeInfo.getDrbdResourceInfo().getName());
} else {
return true;
}
}
@Override
public void action() {
final DrbdResourceInfo dri =
drbdVolumeInfo.getDrbdResourceInfo();
final Host pHost =
dri.getProxyHost(getHost(), !DrbdResourceInfo.WIZARD);
if (pHost.isDrbdProxyUp(
drbdVolumeInfo.getDrbdResourceInfo().getName())) {
DRBD.proxyDown(
pHost,
drbdVolumeInfo.getDrbdResourceInfo().getName(),
drbdVolumeInfo.getName(),
testOnly);
} else {
DRBD.proxyUp(
pHost,
drbdVolumeInfo.getDrbdResourceInfo().getName(),
drbdVolumeInfo.getName(),
testOnly);
}
getBrowser().getClusterBrowser().updateProxyHWInfo(pHost);
}
};
items.add(proxyItem);
/* view log */
final MyMenuItem viewDrbdLogItem =
new MyMenuItem(Tools.getString("HostBrowser.Drbd.ViewDrbdLog"),
LOGFILE_ICON,
null,
new AccessMode(ConfigData.AccessType.RO, false),
new AccessMode(ConfigData.AccessType.RO, false)) {
private static final long serialVersionUID = 1L;
@Override
public boolean visiblePredicate() {
return getBlockDevice().isDrbd();
}
@Override
public String enablePredicate() {
return null;
}
@Override
public void action() {
String device = getDrbdVolumeInfo().getDevice();
DrbdLog l = new DrbdLog(getHost(), device);
l.showDialog();
}
};
items.add(viewDrbdLogItem);
return items;
}
| public List<UpdatableItem> createPopup() {
final List<UpdatableItem> items = new ArrayList<UpdatableItem>();
final BlockDevInfo thisClass = this;
final boolean testOnly = false;
final MyMenu repMenuItem = new MyMenu(
Tools.getString("HostBrowser.Drbd.AddDrbdResource"),
new AccessMode(ConfigData.AccessType.ADMIN, false),
new AccessMode(ConfigData.AccessType.OP, false)) {
private static final long serialVersionUID = 1L;
@Override
public String enablePredicate() {
final DrbdXML dxml =
getBrowser().getClusterBrowser().getDrbdXML();
if (drbdVolumeInfo != null) {
return "it is already a drbd resouce";
} else if (!getHost().isConnected()) {
return Host.NOT_CONNECTED_STRING;
} else if (!getHost().isDrbdLoaded()) {
return "drbd is not loaded";
} else if (getBlockDevice().isMounted()) {
return "is mounted";
} else if (getBlockDevice().isVolumeGroupOnPhysicalVolume()) {
return "is volume group";
} else if (!getBlockDevice().isAvailable()) {
return "not available";
} else if (dxml.isDrbdDisabled()) {
return "disabled because of config";
}
return null;
}
@Override
public void update() {
super.update();
Cluster cluster = getHost().getCluster();
Host[] otherHosts = cluster.getHostsArray();
final List<MyMenu> hostMenus = new ArrayList<MyMenu>();
for (final Host oHost : otherHosts) {
if (oHost == getHost()) {
continue;
}
final MyMenu hostMenu = new MyMenu(oHost.getName(),
new AccessMode(
ConfigData.AccessType.ADMIN,
false),
new AccessMode(
ConfigData.AccessType.OP,
false)) {
private static final long serialVersionUID = 1L;
@Override
public String enablePredicate() {
final DrbdXML dxml =
getBrowser().getClusterBrowser().getDrbdXML();
if (!oHost.isConnected()) {
return Host.NOT_CONNECTED_STRING;
} else if (!oHost.isDrbdLoaded()) {
return "drbd is not loaded";
} else {
return null;
}
//return oHost.isConnected()
// && oHost.isDrbdLoaded();
}
@Override
public void update() {
super.update();
Tools.invokeAndWait(new Runnable() {
@Override
public void run() {
removeAll();
}
});
Set<BlockDevInfo> blockDevInfos =
oHost.getBrowser().getBlockDevInfosInSwing();
List<BlockDevInfo> blockDevInfosS =
new ArrayList<BlockDevInfo>();
for (final BlockDevInfo oBdi : blockDevInfos) {
if (oBdi.getName().equals(
getBlockDevice().getName())) {
blockDevInfosS.add(0, oBdi);
} else {
blockDevInfosS.add(oBdi);
}
}
for (final BlockDevInfo oBdi : blockDevInfosS) {
if (oBdi.getDrbdVolumeInfo() == null
&& oBdi.getBlockDevice().isAvailable()) {
add(addDrbdResourceMenuItem(oBdi,
testOnly));
}
if (oBdi.getName().equals(
getBlockDevice().getName())) {
addSeparator();
}
}
}
};
hostMenu.update();
hostMenus.add(hostMenu);
}
Tools.invokeAndWait(new Runnable() {
@Override
public void run() {
removeAll();
for (final MyMenu hostMenu : hostMenus) {
add(hostMenu);
}
}
});
}
};
items.add(repMenuItem);
/* PV Create */
items.add(getPVCreateItem());
/* PV Remove */
items.add(getPVRemoveItem());
/* VG Create */
items.add(getVGCreateItem());
/* VG Remove */
items.add(getVGRemoveItem());
/* LV Create */
items.add(getLVCreateItem());
/* LV Remove */
items.add(getLVRemoveItem());
/* LV Resize */
items.add(getLVResizeItem());
/* LV Snapshot */
items.add(getLVSnapshotItem());
/* attach / detach */
final MyMenuItem attachMenu =
new MyMenuItem(Tools.getString("HostBrowser.Drbd.Detach"),
NO_HARDDISK_ICON_LARGE,
Tools.getString("HostBrowser.Drbd.Detach.ToolTip"),
Tools.getString("HostBrowser.Drbd.Attach"),
HARDDISK_DRBD_ICON_LARGE,
Tools.getString("HostBrowser.Drbd.Attach.ToolTip"),
new AccessMode(ConfigData.AccessType.OP, true),
new AccessMode(ConfigData.AccessType.OP, false)) {
private static final long serialVersionUID = 1L;
@Override
public boolean predicate() {
return !getBlockDevice().isDrbd()
|| getBlockDevice().isAttached();
}
@Override
public boolean visiblePredicate() {
return getBlockDevice().isDrbd();
}
@Override
public String enablePredicate() {
if (!getBlockDevice().isDrbd()) {
return NO_DRBD_RESOURCE_STRING;
}
if (!Tools.getConfigData().isAdvancedMode()
&& drbdVolumeInfo.getDrbdResourceInfo().isUsedByCRM()) {
return DrbdVolumeInfo.IS_USED_BY_CRM_STRING;
}
if (getBlockDevice().isSyncing()) {
return DrbdVolumeInfo.IS_SYNCING_STRING;
}
return null;
}
@Override
public void action() {
if (this.getText().equals(
Tools.getString("HostBrowser.Drbd.Attach"))) {
attach(testOnly);
} else {
detach(testOnly);
}
}
};
final ClusterBrowser wi = getBrowser().getClusterBrowser();
if (wi != null) {
final ClusterBrowser.DRBDMenuItemCallback attachItemCallback =
wi.new DRBDMenuItemCallback(attachMenu, getHost()) {
@Override
public void action(final Host host) {
if (isDiskless(false)) {
attach(true);
} else {
detach(true);
}
}
};
addMouseOverListener(attachMenu, attachItemCallback);
}
items.add(attachMenu);
/* connect / disconnect */
final MyMenuItem connectMenu =
new MyMenuItem(Tools.getString("HostBrowser.Drbd.Disconnect"),
null,
Tools.getString("HostBrowser.Drbd.Disconnect"),
Tools.getString("HostBrowser.Drbd.Connect"),
null,
Tools.getString("HostBrowser.Drbd.Connect"),
new AccessMode(ConfigData.AccessType.OP, true),
new AccessMode(ConfigData.AccessType.OP, false)) {
private static final long serialVersionUID = 1L;
@Override
public boolean predicate() {
return isConnectedOrWF(testOnly);
}
@Override
public boolean visiblePredicate() {
return getBlockDevice().isDrbd();
}
@Override
public String enablePredicate() {
if (!getBlockDevice().isDrbd()) {
return NO_DRBD_RESOURCE_STRING;
}
if (!Tools.getConfigData().isAdvancedMode()
&& drbdVolumeInfo.getDrbdResourceInfo().isUsedByCRM()) {
return DrbdVolumeInfo.IS_USED_BY_CRM_STRING;
}
if (!getBlockDevice().isSyncing()
|| ((getBlockDevice().isPrimary()
&& getBlockDevice().isSyncSource())
|| (getOtherBlockDevInfo().getBlockDevice().
isPrimary()
&& getBlockDevice().isSyncTarget()))) {
return null;
} else {
return DrbdVolumeInfo.IS_SYNCING_STRING;
}
}
@Override
public void action() {
if (this.getText().equals(
Tools.getString("HostBrowser.Drbd.Connect"))) {
connect(testOnly);
} else {
disconnect(testOnly);
}
}
};
if (wi != null) {
final ClusterBrowser.DRBDMenuItemCallback connectItemCallback =
wi.new DRBDMenuItemCallback(connectMenu,
getHost()) {
@Override
public void action(final Host host) {
if (isConnectedOrWF(false)) {
disconnect(true);
} else {
connect(true);
}
}
};
addMouseOverListener(connectMenu, connectItemCallback);
}
items.add(connectMenu);
/* set primary */
final MyMenuItem setPrimaryItem =
new MyMenuItem(Tools.getString(
"HostBrowser.Drbd.SetPrimaryOtherSecondary"),
null,
Tools.getString(
"HostBrowser.Drbd.SetPrimaryOtherSecondary"),
Tools.getString("HostBrowser.Drbd.SetPrimary"),
null,
Tools.getString("HostBrowser.Drbd.SetPrimary"),
new AccessMode(ConfigData.AccessType.OP, true),
new AccessMode(ConfigData.AccessType.OP, false)) {
private static final long serialVersionUID = 1L;
@Override
public boolean predicate() {
if (!getBlockDevice().isDrbd()) {
return false;
}
return getBlockDevice().isSecondary()
&& getOtherBlockDevInfo().getBlockDevice().isPrimary();
}
@Override
public boolean visiblePredicate() {
return getBlockDevice().isDrbd();
}
@Override
public String enablePredicate() {
if (!getBlockDevice().isDrbd()) {
return NO_DRBD_RESOURCE_STRING;
}
if (!Tools.getConfigData().isAdvancedMode()
&& drbdVolumeInfo.getDrbdResourceInfo().isUsedByCRM()) {
return DrbdVolumeInfo.IS_USED_BY_CRM_STRING;
}
if (!getBlockDevice().isSecondary()) {
return "cannot do that to the primary";
}
return null;
}
@Override
public void action() {
BlockDevInfo oBdi = getOtherBlockDevInfo();
if (oBdi != null && oBdi.getBlockDevice().isPrimary()
&& !"yes".equals(
drbdVolumeInfo.getDrbdResourceInfo().getParamSaved(
ALLOW_TWO_PRIMARIES))) {
oBdi.setSecondary(testOnly);
}
setPrimary(testOnly);
}
};
items.add(setPrimaryItem);
/* set secondary */
final MyMenuItem setSecondaryItem =
new MyMenuItem(Tools.getString("HostBrowser.Drbd.SetSecondary"),
null,
Tools.getString(
"HostBrowser.Drbd.SetSecondary.ToolTip"),
new AccessMode(ConfigData.AccessType.OP, true),
new AccessMode(ConfigData.AccessType.OP, false)) {
private static final long serialVersionUID = 1L;
@Override
public boolean visiblePredicate() {
return getBlockDevice().isDrbd();
}
@Override
public String enablePredicate() {
if (!getBlockDevice().isDrbd()) {
return NO_DRBD_RESOURCE_STRING;
}
if (!Tools.getConfigData().isAdvancedMode()
&& drbdVolumeInfo.getDrbdResourceInfo().isUsedByCRM()) {
return DrbdVolumeInfo.IS_USED_BY_CRM_STRING;
}
if (!getBlockDevice().isPrimary()) {
return "cannot do that to the secondary";
}
return null;
}
@Override
public void action() {
setSecondary(testOnly);
}
};
//enableMenu(setSecondaryItem, false);
items.add(setSecondaryItem);
/* force primary */
final MyMenuItem forcePrimaryItem =
new MyMenuItem(Tools.getString("HostBrowser.Drbd.ForcePrimary"),
null,
Tools.getString("HostBrowser.Drbd.ForcePrimary"),
new AccessMode(ConfigData.AccessType.OP, true),
new AccessMode(ConfigData.AccessType.OP, false)) {
private static final long serialVersionUID = 1L;
@Override
public boolean visiblePredicate() {
return getBlockDevice().isDrbd();
}
@Override
public String enablePredicate() {
if (!getBlockDevice().isDrbd()) {
return NO_DRBD_RESOURCE_STRING;
}
if (!Tools.getConfigData().isAdvancedMode()
&& drbdVolumeInfo.getDrbdResourceInfo().isUsedByCRM()) {
return DrbdVolumeInfo.IS_USED_BY_CRM_STRING;
}
return null;
}
@Override
public void action() {
forcePrimary(testOnly);
}
};
items.add(forcePrimaryItem);
/* invalidate */
final MyMenuItem invalidateItem =
new MyMenuItem(
Tools.getString("HostBrowser.Drbd.Invalidate"),
null,
Tools.getString("HostBrowser.Drbd.Invalidate.ToolTip"),
new AccessMode(ConfigData.AccessType.ADMIN, true),
new AccessMode(ConfigData.AccessType.OP, false)) {
private static final long serialVersionUID = 1L;
@Override
public boolean visiblePredicate() {
return getBlockDevice().isDrbd();
}
@Override
public String enablePredicate() {
if (!getBlockDevice().isDrbd()) {
return NO_DRBD_RESOURCE_STRING;
}
if (!Tools.getConfigData().isAdvancedMode()
&& drbdVolumeInfo.getDrbdResourceInfo().isUsedByCRM()) {
return DrbdVolumeInfo.IS_USED_BY_CRM_STRING;
}
if (getBlockDevice().isSyncing()) {
return DrbdVolumeInfo.IS_SYNCING_STRING;
}
if (getDrbdVolumeInfo().isVerifying()) {
return DrbdVolumeInfo.IS_VERIFYING_STRING;
}
return null;
//return !getBlockDevice().isSyncing()
// && !getDrbdVolumeInfo().isVerifying();
}
@Override
public void action() {
invalidateBD(testOnly);
}
};
items.add(invalidateItem);
/* resume / pause sync */
final MyMenuItem resumeSyncItem =
new MyMenuItem(
Tools.getString("HostBrowser.Drbd.ResumeSync"),
null,
Tools.getString("HostBrowser.Drbd.ResumeSync.ToolTip"),
Tools.getString("HostBrowser.Drbd.PauseSync"),
null,
Tools.getString("HostBrowser.Drbd.PauseSync.ToolTip"),
new AccessMode(ConfigData.AccessType.OP, true),
new AccessMode(ConfigData.AccessType.OP, false)) {
private static final long serialVersionUID = 1L;
@Override
public boolean predicate() {
return getBlockDevice().isSyncing()
&& getBlockDevice().isPausedSync();
}
@Override
public boolean visiblePredicate() {
return getBlockDevice().isDrbd();
}
@Override
public String enablePredicate() {
if (!getBlockDevice().isDrbd()) {
return NO_DRBD_RESOURCE_STRING;
}
if (!Tools.getConfigData().isAdvancedMode()
&& drbdVolumeInfo.getDrbdResourceInfo().isUsedByCRM()) {
return DrbdVolumeInfo.IS_USED_BY_CRM_STRING;
}
if (!getBlockDevice().isSyncing()) {
return "it is not being synced";
}
return null;
}
@Override
public void action() {
if (this.getText().equals(
Tools.getString("HostBrowser.Drbd.ResumeSync"))) {
resumeSync(testOnly);
} else {
pauseSync(testOnly);
}
}
};
items.add(resumeSyncItem);
/* resize */
final MyMenuItem resizeItem =
new MyMenuItem(Tools.getString("HostBrowser.Drbd.Resize"),
null,
Tools.getString("HostBrowser.Drbd.Resize.ToolTip"),
new AccessMode(ConfigData.AccessType.ADMIN, true),
new AccessMode(ConfigData.AccessType.OP, false)) {
private static final long serialVersionUID = 1L;
@Override
public boolean visiblePredicate() {
return getBlockDevice().isDrbd();
}
@Override
public String enablePredicate() {
if (!getBlockDevice().isDrbd()) {
return NO_DRBD_RESOURCE_STRING;
}
if (!Tools.getConfigData().isAdvancedMode()
&& drbdVolumeInfo.getDrbdResourceInfo().isUsedByCRM()) {
return DrbdVolumeInfo.IS_USED_BY_CRM_STRING;
}
if (getBlockDevice().isSyncing()) {
return DrbdVolumeInfo.IS_SYNCING_STRING;
}
return null;
}
@Override
public void action() {
resizeDrbd(testOnly);
}
};
items.add(resizeItem);
/* discard my data */
final MyMenuItem discardDataItem =
new MyMenuItem(Tools.getString("HostBrowser.Drbd.DiscardData"),
null,
Tools.getString(
"HostBrowser.Drbd.DiscardData.ToolTip"),
new AccessMode(ConfigData.AccessType.ADMIN, true),
new AccessMode(ConfigData.AccessType.OP, false)) {
private static final long serialVersionUID = 1L;
@Override
public boolean visiblePredicate() {
return getBlockDevice().isDrbd();
}
@Override
public String enablePredicate() {
if (!getBlockDevice().isDrbd()) {
return NO_DRBD_RESOURCE_STRING;
}
if (!Tools.getConfigData().isAdvancedMode()
&& drbdVolumeInfo.getDrbdResourceInfo().isUsedByCRM()) {
return DrbdVolumeInfo.IS_USED_BY_CRM_STRING;
}
if (getBlockDevice().isSyncing()) {
return DrbdVolumeInfo.IS_SYNCING_STRING;
}
//if (isConnected(testOnly)) { // ? TODO: check this
// return "is connected";
//}
if (getBlockDevice().isPrimary()) {
return "cannot do that to the primary";
}
return null;
//return !getBlockDevice().isSyncing()
// && !isConnected(testOnly)
// && !getBlockDevice().isPrimary();
}
@Override
public void action() {
discardData(testOnly);
}
};
items.add(discardDataItem);
/* proxy up/down */
final MyMenuItem proxyItem =
new MyMenuItem(Tools.getString("BlockDevInfo.Drbd.ProxyDown"),
null,
getMenuToolTip("DRBD.proxyDown"),
Tools.getString("BlockDevInfo.Drbd.ProxyUp"),
null,
getMenuToolTip("DRBD.proxyUp"),
new AccessMode(ConfigData.AccessType.ADMIN,
!AccessMode.ADVANCED),
new AccessMode(ConfigData.AccessType.OP,
!AccessMode.ADVANCED)) {
private static final long serialVersionUID = 1L;
@Override
public boolean visiblePredicate() {
if (!getBlockDevice().isDrbd()) {
return false;
}
return getDrbdVolumeInfo().getDrbdResourceInfo().isProxy(
getHost());
}
@Override
public String enablePredicate() {
if (!getBlockDevice().isDrbd()) {
return NO_DRBD_RESOURCE_STRING;
}
final DrbdResourceInfo dri =
drbdVolumeInfo.getDrbdResourceInfo();
final Host pHost =
dri.getProxyHost(getHost(), !DrbdResourceInfo.WIZARD);
if (pHost == null) {
return "not a proxy";
}
if (!pHost.isConnected()) {
return Host.NOT_CONNECTED_STRING;
}
if (!pHost.isDrbdProxyRunning()) {
return "proxy daemon is not running";
}
return null;
}
@Override
public boolean predicate() {
if (!getBlockDevice().isDrbd()) {
return false;
}
final DrbdResourceInfo dri =
drbdVolumeInfo.getDrbdResourceInfo();
final Host pHost =
dri.getProxyHost(getHost(), !DrbdResourceInfo.WIZARD);
if (pHost == null) {
return false;
}
if (getBlockDevice().isDrbd()) {
return pHost.isDrbdProxyUp(
drbdVolumeInfo.getDrbdResourceInfo().getName());
} else {
return true;
}
}
@Override
public void action() {
final DrbdResourceInfo dri =
drbdVolumeInfo.getDrbdResourceInfo();
final Host pHost =
dri.getProxyHost(getHost(), !DrbdResourceInfo.WIZARD);
if (pHost.isDrbdProxyUp(
drbdVolumeInfo.getDrbdResourceInfo().getName())) {
DRBD.proxyDown(
pHost,
drbdVolumeInfo.getDrbdResourceInfo().getName(),
drbdVolumeInfo.getName(),
testOnly);
} else {
DRBD.proxyUp(
pHost,
drbdVolumeInfo.getDrbdResourceInfo().getName(),
drbdVolumeInfo.getName(),
testOnly);
}
getBrowser().getClusterBrowser().updateProxyHWInfo(pHost);
}
};
items.add(proxyItem);
/* view log */
final MyMenuItem viewDrbdLogItem =
new MyMenuItem(Tools.getString("HostBrowser.Drbd.ViewDrbdLog"),
LOGFILE_ICON,
null,
new AccessMode(ConfigData.AccessType.RO, false),
new AccessMode(ConfigData.AccessType.RO, false)) {
private static final long serialVersionUID = 1L;
@Override
public boolean visiblePredicate() {
return getBlockDevice().isDrbd();
}
@Override
public String enablePredicate() {
return null;
}
@Override
public void action() {
String device = getDrbdVolumeInfo().getDevice();
DrbdLog l = new DrbdLog(getHost(), device);
l.showDialog();
}
};
items.add(viewDrbdLogItem);
return items;
}
|
diff --git a/jdbc2/src/examples/TestJava2d.java b/jdbc2/src/examples/TestJava2d.java
index 2a4ca33d..8ae61481 100644
--- a/jdbc2/src/examples/TestJava2d.java
+++ b/jdbc2/src/examples/TestJava2d.java
@@ -1,179 +1,179 @@
/*
* Test.java
*
* PostGIS extension for PostgreSQL JDBC driver - example and test classes
*
* (C) 2004 Paul Ramsey, [email protected]
*
* (C) 2005 Markus Schaber, [email protected]
*
* This program is free software; you can redistribute it and/or modify it under
* the terms of the GNU General Public License as published by the Free Software
* Foundation; either version 2 of the License.
*
* This program is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
* FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
* details.
*
* You should have received a copy of the GNU General Public License along with
* this program; if not, write to the Free Software Foundation, Inc., 59 Temple
* Place, Suite 330, Boston, MA 02111-1307 USA or visit the web at
* http://www.gnu.org.
*
* $Id$
*/
package examples;
import java.awt.*;
import java.awt.event.WindowEvent;
import java.awt.event.WindowListener;
import java.awt.geom.AffineTransform;
import java.awt.geom.Rectangle2D;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
import org.postgis.java2d.Java2DWrapper;
public class TestJava2d {
private static final boolean DEBUG = true;
public static final Shape[] SHAPEARRAY = new Shape[0];
static {
new Java2DWrapper(); // make shure our driver is initialized
}
public static void main(String[] args) throws ClassNotFoundException, SQLException {
if (args.length != 5) {
- System.err.println("Usage: java examples/TestServer dburl user pass tablename column");
+ System.err.println("Usage: java examples/TestJava2D dburl user pass tablename column");
System.err.println();
System.err.println("dburl has the following format:");
System.err.println(Java2DWrapper.POSTGIS_PROTOCOL + "//HOST:PORT/DATABASENAME");
System.err.println("tablename is 'jdbc_test' by default.");
System.exit(1);
}
Shape[] geometries = read(args[0], args[1], args[2], "SELECT " + args[4] + " FROM "
+ args[3]);
if (geometries.length == 0) {
System.err.println("No geometries were found.");
return;
}
System.err.println("Painting...");
Frame window = new Frame("PostGIS java2D demo");
Canvas CV = new GisCanvas(geometries);
window.add(CV);
window.setSize(500, 500);
window.addWindowListener(new EventHandler());
window.show();
}
static Rectangle2D calcbbox(Shape[] geometries) {
Rectangle2D bbox = geometries[0].getBounds2D();
for (int i = 1; i < geometries.length; i++) {
bbox = bbox.createUnion(geometries[i].getBounds2D());
}
return bbox;
}
private static Shape[] read(String dburl, String dbuser, String dbpass, String query)
throws ClassNotFoundException, SQLException {
ArrayList geometries = new ArrayList();
System.out.println("Creating JDBC connection...");
Class.forName("org.postgresql.Driver");
Connection conn = DriverManager.getConnection(dburl, dbuser, dbpass);
System.out.println("fetching geometries");
ResultSet r = conn.createStatement().executeQuery(query);
while (r.next()) {
final Shape current = (Shape) r.getObject(1);
if (current != null) {
geometries.add(current);
}
}
conn.close();
return (Shape[]) geometries.toArray(SHAPEARRAY);
}
public static class GisCanvas extends Canvas {
/** Keep java 1.5 compiler happy */
private static final long serialVersionUID = 1L;
final Rectangle2D bbox;
final Shape[] geometries;
public GisCanvas(Shape[] geometries) {
this.geometries = geometries;
this.bbox = calcbbox(geometries);
setBackground(Color.GREEN);
}
public void paint(Graphics og) {
Graphics2D g = (Graphics2D) og;
final double scaleX = (super.getWidth() - 10) / bbox.getWidth();
final double scaleY = (super.getHeight() - 10) / bbox.getHeight();
AffineTransform at = new AffineTransform();
at.translate(super.getX() + 5, super.getY() + 5);
at.scale(scaleX, scaleY);
at.translate(-bbox.getX(), -bbox.getY());
if (DEBUG) {
System.err.println();
System.err.println("bbox: " + bbox);
System.err.println("trans: " + at);
System.err.println("new: " + at.createTransformedShape(bbox).getBounds2D());
System.err.println("visual:" + super.getBounds());
}
for (int i = 0; i < geometries.length; i++) {
g.setPaint(Color.BLUE);
final Shape shape = at.createTransformedShape(geometries[i]);
g.fill(shape);
g.setPaint(Color.ORANGE);
g.draw(shape);
}
}
}
public static class EventHandler implements WindowListener {
public void windowActivated(WindowEvent e) {//
}
public void windowClosed(WindowEvent e) {//
}
public void windowClosing(WindowEvent e) {
e.getWindow().hide();
System.exit(0);
}
public void windowDeactivated(WindowEvent e) {//
}
public void windowDeiconified(WindowEvent e) {//
}
public void windowIconified(WindowEvent e) {//
}
public void windowOpened(WindowEvent e) {//
}
}
}
| true | true | public static void main(String[] args) throws ClassNotFoundException, SQLException {
if (args.length != 5) {
System.err.println("Usage: java examples/TestServer dburl user pass tablename column");
System.err.println();
System.err.println("dburl has the following format:");
System.err.println(Java2DWrapper.POSTGIS_PROTOCOL + "//HOST:PORT/DATABASENAME");
System.err.println("tablename is 'jdbc_test' by default.");
System.exit(1);
}
Shape[] geometries = read(args[0], args[1], args[2], "SELECT " + args[4] + " FROM "
+ args[3]);
if (geometries.length == 0) {
System.err.println("No geometries were found.");
return;
}
System.err.println("Painting...");
Frame window = new Frame("PostGIS java2D demo");
Canvas CV = new GisCanvas(geometries);
window.add(CV);
window.setSize(500, 500);
window.addWindowListener(new EventHandler());
window.show();
}
| public static void main(String[] args) throws ClassNotFoundException, SQLException {
if (args.length != 5) {
System.err.println("Usage: java examples/TestJava2D dburl user pass tablename column");
System.err.println();
System.err.println("dburl has the following format:");
System.err.println(Java2DWrapper.POSTGIS_PROTOCOL + "//HOST:PORT/DATABASENAME");
System.err.println("tablename is 'jdbc_test' by default.");
System.exit(1);
}
Shape[] geometries = read(args[0], args[1], args[2], "SELECT " + args[4] + " FROM "
+ args[3]);
if (geometries.length == 0) {
System.err.println("No geometries were found.");
return;
}
System.err.println("Painting...");
Frame window = new Frame("PostGIS java2D demo");
Canvas CV = new GisCanvas(geometries);
window.add(CV);
window.setSize(500, 500);
window.addWindowListener(new EventHandler());
window.show();
}
|
diff --git a/standalone/src/main/java/org/usergrid/standalone/ServerLoader.java b/standalone/src/main/java/org/usergrid/standalone/ServerLoader.java
index 44515f66..f55eb1f7 100644
--- a/standalone/src/main/java/org/usergrid/standalone/ServerLoader.java
+++ b/standalone/src/main/java/org/usergrid/standalone/ServerLoader.java
@@ -1,31 +1,31 @@
/*******************************************************************************
* Copyright 2012 Apigee Corporation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
******************************************************************************/
package org.usergrid.standalone;
import com.jdotsoft.jarloader.JarClassLoader;
public class ServerLoader {
public static void main(String[] args) {
- JarClassLoader jcl = new JarClassLoader();
+ JarClassLoader jcl = new JarClassLoader(Thread.currentThread().getContextClassLoader());
try {
jcl.invokeMain("org.usergrid.standalone.Server", args);
} catch (Throwable e) {
e.printStackTrace();
}
} // main()
}
| true | true | public static void main(String[] args) {
JarClassLoader jcl = new JarClassLoader();
try {
jcl.invokeMain("org.usergrid.standalone.Server", args);
} catch (Throwable e) {
e.printStackTrace();
}
} // main()
| public static void main(String[] args) {
JarClassLoader jcl = new JarClassLoader(Thread.currentThread().getContextClassLoader());
try {
jcl.invokeMain("org.usergrid.standalone.Server", args);
} catch (Throwable e) {
e.printStackTrace();
}
} // main()
|
diff --git a/ecologylab/xml/types/scalar/ParsedURLType.java b/ecologylab/xml/types/scalar/ParsedURLType.java
index 608f3edd..1f9b1c04 100644
--- a/ecologylab/xml/types/scalar/ParsedURLType.java
+++ b/ecologylab/xml/types/scalar/ParsedURLType.java
@@ -1,110 +1,110 @@
/*
* Created on Dec 31, 2004 at the Interface Ecology Lab.
*/
package ecologylab.xml.types.scalar;
import java.io.File;
import ecologylab.net.ParsedURL;
/**
* Type system entry for java.awt.Color. Uses a hex string as initialization.
*
* @author andruid
*/
public class ParsedURLType extends ReferenceType<ParsedURL>
{
/**
* This constructor should only be called once per session, through
* a static initializer, typically in TypeRegistry.
* <p>
* To get the instance of this type object for use in translations, call
* <code>TypeRegistry.get("cm.generic.ParsedURL")</code>.
*
*/
public ParsedURLType()
{
super(ParsedURL.class);
}
/**
* Looks for file in value, and creates a ParsedURL with file set if appropriate.
* Otherwise, calls ParsedURL.getAbsolute().
*
* @param value String to marshall into a typed instance.
*
* @see ecologylab.xml.types.scalar.ScalarType#getInstance(java.lang.String)
*/
public ParsedURL getInstance(String value)
{
File file = null;
if (value.startsWith("file://"))
{
int startIndex = value.startsWith("file:///") ? 8 : 7;
value = value.substring(startIndex);
file = ecologylab.io.Files.newFile(value);
}
else if (value.indexOf(':') == 1)
{
file = ecologylab.io.Files.newFile(value);
}
return (file != null) ? new ParsedURL(file)
- : ParsedURL.getAbsolute(value, " getInstance()");
+ : ParsedURL.getAbsolute(value, "ParsedURLType.getInstance()");
}
public static final String URL_DELIMS = "/&?";
/**
* For editing: these are the valid delimiters for separating tokens that make up a field
* of this type.
*
* @return
*/
public String delimeters()
{
return URL_DELIMS;
}
public boolean allowNewLines()
{
return false;
}
/**
* When editing, determines whether delimiters can be included in token strings.
*
* @return true for URLs
*/
//FIXME -- Add String delimitersAfter to TextChunk -- interleaved with TextTokens, and
//get rid of this!!!
public boolean allowDelimitersInTokens()
{
return true;
}
/**
* When editing, do not allow the user to include these characters in the resulting value String.
* @return
*/
public String illegalChars()
{
return " !{}\t\n\r";
}
/**
* When editing, is the field one that should be part of the Term model?
*
* @return false for URLs
*/
public boolean composedOfTerms()
{
return false;
}
/**
* True if the user should be able to express interest in fields of this type.
*
* @return false for URLs
*/
public boolean affordsInterestExpression()
{
return false;
}
}
| true | true | public ParsedURL getInstance(String value)
{
File file = null;
if (value.startsWith("file://"))
{
int startIndex = value.startsWith("file:///") ? 8 : 7;
value = value.substring(startIndex);
file = ecologylab.io.Files.newFile(value);
}
else if (value.indexOf(':') == 1)
{
file = ecologylab.io.Files.newFile(value);
}
return (file != null) ? new ParsedURL(file)
: ParsedURL.getAbsolute(value, " getInstance()");
}
| public ParsedURL getInstance(String value)
{
File file = null;
if (value.startsWith("file://"))
{
int startIndex = value.startsWith("file:///") ? 8 : 7;
value = value.substring(startIndex);
file = ecologylab.io.Files.newFile(value);
}
else if (value.indexOf(':') == 1)
{
file = ecologylab.io.Files.newFile(value);
}
return (file != null) ? new ParsedURL(file)
: ParsedURL.getAbsolute(value, "ParsedURLType.getInstance()");
}
|
diff --git a/trunk/src/main/java/example/deploy/text/TextContentParser.java b/trunk/src/main/java/example/deploy/text/TextContentParser.java
index e54d155..c00e15d 100644
--- a/trunk/src/main/java/example/deploy/text/TextContentParser.java
+++ b/trunk/src/main/java/example/deploy/text/TextContentParser.java
@@ -1,311 +1,312 @@
package example.deploy.text;
import static com.polopoly.cm.server.ServerNames.CONTENT_ATTRG_SYSTEM;
import static com.polopoly.cm.server.ServerNames.CONTENT_ATTR_NAME;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.ArrayList;
import java.util.List;
import java.util.logging.Level;
import java.util.logging.Logger;
import com.polopoly.cm.server.ServerNames;
import example.deploy.hotdeploy.client.Major;
public class TextContentParser {
private static final Logger LOGGER = Logger
.getLogger(TextContentParser.class.getName());
public static final char SEPARATOR_CHAR = ':';
public static final String TEXT_CONTENT_FILE_EXTENSION = "content";
public static final String ID_PREFIX = "id";
public static final String INPUT_TEMPLATE_PREFIX = "inputtemplate";
public static final String NAME_PREFIX = "name";
public static final String SECURITY_PARENT_PREFIX = "securityparent";
public static final String COMPONENT_PREFIX = "component";
public static final String REFERENCE_PREFIX = "ref";
public static final String LIST_PREFIX = "list";
public static final String TEMPLATE_PREFIX = "template";
public static final String PUBLISH_PREFIX = "publish";
public static final String MAJOR_PREFIX = "major";
public static final String FILE_PREFIX = "file";
private BufferedReader reader;
private TextContentSet parsed = new TextContentSet();
private TextContent currentContent;
private String line;
private int atLine;
private URL contentUrl;
private String fileName;
public TextContentParser(InputStream inputStream, URL contentUrl,
String fileName) throws IOException {
reader = new BufferedReader(new InputStreamReader(inputStream, "UTF-8"));
this.contentUrl = contentUrl;
int i = fileName.lastIndexOf('/');
if (i != -1) {
fileName = fileName.substring(i + 1);
}
if (fileName.endsWith('.' + TEXT_CONTENT_FILE_EXTENSION)) {
fileName = fileName.substring(0, fileName.length()
- TEXT_CONTENT_FILE_EXTENSION.length() - 1);
} else {
LOGGER.log(Level.WARNING, "Expected file name " + fileName
+ " to end with ." + TEXT_CONTENT_FILE_EXTENSION + ".");
}
this.fileName = fileName;
}
public TextContentSet parse() throws IOException, ParseException {
while ((line = reader.readLine()) != null) {
atLine++;
parseLine(line);
}
return parsed;
}
private void parseLine(String line) throws ParseException {
if (line.startsWith("#")) {
return;
}
String[] fields = split(line);
if (fields.length < 2) {
if (!line.trim().equals("")) {
fail("Unrecognized line.");
}
return;
}
String prefix = fields[0];
if (prefix.equals(ID_PREFIX)) {
assertFields(2, fields);
currentContent = new TextContent();
currentContent.setId(expandId(fields[1]));
parsed.add(currentContent);
return;
}
if (currentContent == null) {
fail("Add an \"" + ID_PREFIX + ":\" line first.");
return;
}
if (prefix.equals(INPUT_TEMPLATE_PREFIX)) {
assertFields(2, fields);
currentContent.setInputTemplate(new ExternalIdReference(fields[1]));
} else if (prefix.equals(NAME_PREFIX)) {
assertFields(2, fields);
// TODO: replace with constants.
currentContent.setComponent(CONTENT_ATTRG_SYSTEM,
CONTENT_ATTR_NAME, fields[1]);
} else if (prefix.equals(SECURITY_PARENT_PREFIX)) {
assertFields(2, fields);
currentContent.setSecurityParent(new ExternalIdReference(
expandId(fields[1])));
} else if (prefix.equals(COMPONENT_PREFIX)) {
assertFields(4, fields);
currentContent.setComponent(fields[1], fields[2], fields[3]);
} else if (prefix.equals(REFERENCE_PREFIX)) {
assertFields(4, fields);
currentContent.setReference(fields[1], fields[2],
new ExternalIdReference(expandId(fields[3])));
} else if (prefix.equals(FILE_PREFIX)) {
assertFields(3, fields);
try {
URL fileUrl = new URL(contentUrl, fields[2]);
currentContent.addFile(fields[1], fileUrl.openStream());
} catch (MalformedURLException e) {
fail("Could not read file " + fields[2] + " relative to "
+ contentUrl + ".");
} catch (IOException e) {
fail("Could not read file " + fields[2] + " relative to "
+ contentUrl + ".");
}
} else if (prefix.equals(LIST_PREFIX)) {
String group = null;
String referredId = null;
String metadata = null;
if (fields.length == 2) {
group = ServerNames.DEPARTMENT_ATTRG_SYSTEM;
referredId = fields[1];
} else if (fields.length == 3) {
group = fields[1];
referredId = fields[2];
} else if (fields.length == 4) {
+ group = fields[1];
metadata = fields[3];
referredId = fields[2];
} else {
fail("Expected one, two or three parameters for operation "
+ fields[0]
+ " (rather than the provided "
+ (fields.length - 1)
+ "). "
+ "The parameters are: group (optionalunless reference metadata is provided), "
+ "referred object, reference metadata (optional).");
}
currentContent.getList(group).add(
new ExternalIdReference(expandId(referredId),
expandId(metadata)));
} else if (prefix.equals(TEMPLATE_PREFIX)) {
assertFields(2, fields);
currentContent.setTemplateId(fields[1]);
} else if (prefix.equals(MAJOR_PREFIX)) {
assertFields(2, fields);
String majorString = fields[1].trim();
try {
int intMajor = Integer.parseInt(majorString);
currentContent.setMajor(Major.getMajor(intMajor));
} catch (NumberFormatException e) {
Major major = Major.getMajor(majorString);
if (major == Major.UNKNOWN) {
fail("Unknown major \"" + majorString + "\".");
}
currentContent.setMajor(major);
}
} else if (prefix.equals(PUBLISH_PREFIX)) {
String group = null;
String publishIn = null;
String metadata = null;
if (fields.length == 2) {
group = ServerNames.DEPARTMENT_ATTRG_SYSTEM;
publishIn = fields[1];
} else if (fields.length == 3) {
group = fields[1];
publishIn = fields[2];
} else if (fields.length == 4) {
group = fields[1];
publishIn = fields[2];
metadata = fields[3];
} else {
fail("Expected one, two or three parameters for operation "
+ fields[0]
+ " (rather than the provided "
+ (fields.length - 1)
+ "). The parameters are: "
+ "group (optional unless reference metadata is provided), object to publish in, "
+ "reference metadata (optional).");
}
Publishing publishing = new Publishing(new ExternalIdReference(
expandId(publishIn), expandId(metadata)), group);
currentContent.addPublishing(publishing);
} else {
fail("Line should start with " + ID_PREFIX + ", "
+ INPUT_TEMPLATE_PREFIX + ", " + NAME_PREFIX + ", "
+ SECURITY_PARENT_PREFIX + ", " + COMPONENT_PREFIX + ", "
+ MAJOR_PREFIX + ", " + REFERENCE_PREFIX + " or "
+ LIST_PREFIX + ".");
}
}
private String expandId(String externalId) {
// reference metadata may be null.
if (externalId == null) {
return null;
}
if (externalId.startsWith(".")) {
return fileName + externalId;
} else {
return externalId;
}
}
private String[] split(String line) {
List<String> result = new ArrayList<String>();
boolean quote = false;
StringBuffer current = new StringBuffer(100);
for (int i = 0; i < line.length(); i++) {
char ch = line.charAt(i);
if (ch == SEPARATOR_CHAR && !quote) {
result.add(current.toString());
current.setLength(0);
} else if (ch == '\\' && !quote) {
quote = true;
} else if (quote && ch == 'n') {
current.append('\n');
} else {
current.append(ch);
quote = false;
}
}
result.add(current.toString());
return result.toArray(new String[result.size()]);
}
private void fail(String message) throws ParseException {
throw new ParseException(message, line, atLine);
}
private void assertFields(int expectedFields, String[] fields)
throws ParseException {
if (fields.length != expectedFields) {
fail("Expected " + (expectedFields - 1)
+ " parameters for operation " + fields[0]
+ " (rather than the provided " + (fields.length - 1)
+ ").");
}
}
public String getFileName() {
return fileName;
}
}
| true | true | private void parseLine(String line) throws ParseException {
if (line.startsWith("#")) {
return;
}
String[] fields = split(line);
if (fields.length < 2) {
if (!line.trim().equals("")) {
fail("Unrecognized line.");
}
return;
}
String prefix = fields[0];
if (prefix.equals(ID_PREFIX)) {
assertFields(2, fields);
currentContent = new TextContent();
currentContent.setId(expandId(fields[1]));
parsed.add(currentContent);
return;
}
if (currentContent == null) {
fail("Add an \"" + ID_PREFIX + ":\" line first.");
return;
}
if (prefix.equals(INPUT_TEMPLATE_PREFIX)) {
assertFields(2, fields);
currentContent.setInputTemplate(new ExternalIdReference(fields[1]));
} else if (prefix.equals(NAME_PREFIX)) {
assertFields(2, fields);
// TODO: replace with constants.
currentContent.setComponent(CONTENT_ATTRG_SYSTEM,
CONTENT_ATTR_NAME, fields[1]);
} else if (prefix.equals(SECURITY_PARENT_PREFIX)) {
assertFields(2, fields);
currentContent.setSecurityParent(new ExternalIdReference(
expandId(fields[1])));
} else if (prefix.equals(COMPONENT_PREFIX)) {
assertFields(4, fields);
currentContent.setComponent(fields[1], fields[2], fields[3]);
} else if (prefix.equals(REFERENCE_PREFIX)) {
assertFields(4, fields);
currentContent.setReference(fields[1], fields[2],
new ExternalIdReference(expandId(fields[3])));
} else if (prefix.equals(FILE_PREFIX)) {
assertFields(3, fields);
try {
URL fileUrl = new URL(contentUrl, fields[2]);
currentContent.addFile(fields[1], fileUrl.openStream());
} catch (MalformedURLException e) {
fail("Could not read file " + fields[2] + " relative to "
+ contentUrl + ".");
} catch (IOException e) {
fail("Could not read file " + fields[2] + " relative to "
+ contentUrl + ".");
}
} else if (prefix.equals(LIST_PREFIX)) {
String group = null;
String referredId = null;
String metadata = null;
if (fields.length == 2) {
group = ServerNames.DEPARTMENT_ATTRG_SYSTEM;
referredId = fields[1];
} else if (fields.length == 3) {
group = fields[1];
referredId = fields[2];
} else if (fields.length == 4) {
metadata = fields[3];
referredId = fields[2];
} else {
fail("Expected one, two or three parameters for operation "
+ fields[0]
+ " (rather than the provided "
+ (fields.length - 1)
+ "). "
+ "The parameters are: group (optionalunless reference metadata is provided), "
+ "referred object, reference metadata (optional).");
}
currentContent.getList(group).add(
new ExternalIdReference(expandId(referredId),
expandId(metadata)));
} else if (prefix.equals(TEMPLATE_PREFIX)) {
assertFields(2, fields);
currentContent.setTemplateId(fields[1]);
} else if (prefix.equals(MAJOR_PREFIX)) {
assertFields(2, fields);
String majorString = fields[1].trim();
try {
int intMajor = Integer.parseInt(majorString);
currentContent.setMajor(Major.getMajor(intMajor));
} catch (NumberFormatException e) {
Major major = Major.getMajor(majorString);
if (major == Major.UNKNOWN) {
fail("Unknown major \"" + majorString + "\".");
}
currentContent.setMajor(major);
}
} else if (prefix.equals(PUBLISH_PREFIX)) {
String group = null;
String publishIn = null;
String metadata = null;
if (fields.length == 2) {
group = ServerNames.DEPARTMENT_ATTRG_SYSTEM;
publishIn = fields[1];
} else if (fields.length == 3) {
group = fields[1];
publishIn = fields[2];
} else if (fields.length == 4) {
group = fields[1];
publishIn = fields[2];
metadata = fields[3];
} else {
fail("Expected one, two or three parameters for operation "
+ fields[0]
+ " (rather than the provided "
+ (fields.length - 1)
+ "). The parameters are: "
+ "group (optional unless reference metadata is provided), object to publish in, "
+ "reference metadata (optional).");
}
Publishing publishing = new Publishing(new ExternalIdReference(
expandId(publishIn), expandId(metadata)), group);
currentContent.addPublishing(publishing);
} else {
fail("Line should start with " + ID_PREFIX + ", "
+ INPUT_TEMPLATE_PREFIX + ", " + NAME_PREFIX + ", "
+ SECURITY_PARENT_PREFIX + ", " + COMPONENT_PREFIX + ", "
+ MAJOR_PREFIX + ", " + REFERENCE_PREFIX + " or "
+ LIST_PREFIX + ".");
}
}
| private void parseLine(String line) throws ParseException {
if (line.startsWith("#")) {
return;
}
String[] fields = split(line);
if (fields.length < 2) {
if (!line.trim().equals("")) {
fail("Unrecognized line.");
}
return;
}
String prefix = fields[0];
if (prefix.equals(ID_PREFIX)) {
assertFields(2, fields);
currentContent = new TextContent();
currentContent.setId(expandId(fields[1]));
parsed.add(currentContent);
return;
}
if (currentContent == null) {
fail("Add an \"" + ID_PREFIX + ":\" line first.");
return;
}
if (prefix.equals(INPUT_TEMPLATE_PREFIX)) {
assertFields(2, fields);
currentContent.setInputTemplate(new ExternalIdReference(fields[1]));
} else if (prefix.equals(NAME_PREFIX)) {
assertFields(2, fields);
// TODO: replace with constants.
currentContent.setComponent(CONTENT_ATTRG_SYSTEM,
CONTENT_ATTR_NAME, fields[1]);
} else if (prefix.equals(SECURITY_PARENT_PREFIX)) {
assertFields(2, fields);
currentContent.setSecurityParent(new ExternalIdReference(
expandId(fields[1])));
} else if (prefix.equals(COMPONENT_PREFIX)) {
assertFields(4, fields);
currentContent.setComponent(fields[1], fields[2], fields[3]);
} else if (prefix.equals(REFERENCE_PREFIX)) {
assertFields(4, fields);
currentContent.setReference(fields[1], fields[2],
new ExternalIdReference(expandId(fields[3])));
} else if (prefix.equals(FILE_PREFIX)) {
assertFields(3, fields);
try {
URL fileUrl = new URL(contentUrl, fields[2]);
currentContent.addFile(fields[1], fileUrl.openStream());
} catch (MalformedURLException e) {
fail("Could not read file " + fields[2] + " relative to "
+ contentUrl + ".");
} catch (IOException e) {
fail("Could not read file " + fields[2] + " relative to "
+ contentUrl + ".");
}
} else if (prefix.equals(LIST_PREFIX)) {
String group = null;
String referredId = null;
String metadata = null;
if (fields.length == 2) {
group = ServerNames.DEPARTMENT_ATTRG_SYSTEM;
referredId = fields[1];
} else if (fields.length == 3) {
group = fields[1];
referredId = fields[2];
} else if (fields.length == 4) {
group = fields[1];
metadata = fields[3];
referredId = fields[2];
} else {
fail("Expected one, two or three parameters for operation "
+ fields[0]
+ " (rather than the provided "
+ (fields.length - 1)
+ "). "
+ "The parameters are: group (optionalunless reference metadata is provided), "
+ "referred object, reference metadata (optional).");
}
currentContent.getList(group).add(
new ExternalIdReference(expandId(referredId),
expandId(metadata)));
} else if (prefix.equals(TEMPLATE_PREFIX)) {
assertFields(2, fields);
currentContent.setTemplateId(fields[1]);
} else if (prefix.equals(MAJOR_PREFIX)) {
assertFields(2, fields);
String majorString = fields[1].trim();
try {
int intMajor = Integer.parseInt(majorString);
currentContent.setMajor(Major.getMajor(intMajor));
} catch (NumberFormatException e) {
Major major = Major.getMajor(majorString);
if (major == Major.UNKNOWN) {
fail("Unknown major \"" + majorString + "\".");
}
currentContent.setMajor(major);
}
} else if (prefix.equals(PUBLISH_PREFIX)) {
String group = null;
String publishIn = null;
String metadata = null;
if (fields.length == 2) {
group = ServerNames.DEPARTMENT_ATTRG_SYSTEM;
publishIn = fields[1];
} else if (fields.length == 3) {
group = fields[1];
publishIn = fields[2];
} else if (fields.length == 4) {
group = fields[1];
publishIn = fields[2];
metadata = fields[3];
} else {
fail("Expected one, two or three parameters for operation "
+ fields[0]
+ " (rather than the provided "
+ (fields.length - 1)
+ "). The parameters are: "
+ "group (optional unless reference metadata is provided), object to publish in, "
+ "reference metadata (optional).");
}
Publishing publishing = new Publishing(new ExternalIdReference(
expandId(publishIn), expandId(metadata)), group);
currentContent.addPublishing(publishing);
} else {
fail("Line should start with " + ID_PREFIX + ", "
+ INPUT_TEMPLATE_PREFIX + ", " + NAME_PREFIX + ", "
+ SECURITY_PARENT_PREFIX + ", " + COMPONENT_PREFIX + ", "
+ MAJOR_PREFIX + ", " + REFERENCE_PREFIX + " or "
+ LIST_PREFIX + ".");
}
}
|
diff --git a/src/main/java/org/sunnycode/hash/file2/UuidHelper.java b/src/main/java/org/sunnycode/hash/file2/UuidHelper.java
index 5eee6fb..85705dc 100644
--- a/src/main/java/org/sunnycode/hash/file2/UuidHelper.java
+++ b/src/main/java/org/sunnycode/hash/file2/UuidHelper.java
@@ -1,15 +1,15 @@
package org.sunnycode.hash.file2;
import java.nio.ByteBuffer;
import java.util.UUID;
public class UuidHelper {
public static byte[] uuidToBytes(UUID uuid) {
- ByteBuffer buf = ByteBuffer.allocate(8);
+ ByteBuffer buf = ByteBuffer.allocate(16);
buf.putLong(uuid.getMostSignificantBits());
buf.putLong(uuid.getLeastSignificantBits());
return buf.array();
}
}
| true | true | public static byte[] uuidToBytes(UUID uuid) {
ByteBuffer buf = ByteBuffer.allocate(8);
buf.putLong(uuid.getMostSignificantBits());
buf.putLong(uuid.getLeastSignificantBits());
return buf.array();
}
| public static byte[] uuidToBytes(UUID uuid) {
ByteBuffer buf = ByteBuffer.allocate(16);
buf.putLong(uuid.getMostSignificantBits());
buf.putLong(uuid.getLeastSignificantBits());
return buf.array();
}
|
diff --git a/jdbc/examples/TestServer.java b/jdbc/examples/TestServer.java
index c94133fa..0c5ca58d 100644
--- a/jdbc/examples/TestServer.java
+++ b/jdbc/examples/TestServer.java
@@ -1,68 +1,73 @@
package examples;
import java.sql.*;
import java.util.*;
import java.lang.*;
import org.postgis.*;
public class TestServer
{
public static void main(String[] args)
{
Connection conn;
String dbname = "tb";
String dbuser = "dblasby";
String dbpass = "";
String dbhost = "ox";
String dbport = "5555";
String dbtable = "jdbc_test";
String dropSQL = "drop table " + dbtable;
String createSQL = "create table " + dbtable + " (geom geometry, id int4)";
String insertPointSQL = "insert into " + dbtable + " values ('POINT (10 10 10)',1)";
String insertPolygonSQL = "insert into " + dbtable + " values ('POLYGON ((0 0 0,0 10 0,10 10 0,10 0 0,0 0 0))',2)";
try {
System.out.println("Creating JDBC connection...");
Class.forName("org.postgresql.Driver");
String url = "jdbc:postgresql://" + dbhost + ":" + dbport + "/" + dbname;
conn = DriverManager.getConnection(url, dbuser, dbpass);
System.out.println("Adding geometric type entries...");
((org.postgresql.Connection)conn).addDataType("geometry","org.postgis.PGgeometry");
((org.postgresql.Connection)conn).addDataType("box3d","org.postgis.PGbox3d");
Statement s = conn.createStatement();
System.out.println("Creating table with geometric types...");
- s.execute(dropSQL);
+ //table might not yet exist
+ try {
+ s.execute(dropSQL);
+ } catch(Exception e) {
+ e.printStackTrace();
+ }
s.execute(createSQL);
System.out.println("Inserting point...");
s.execute(insertPointSQL);
System.out.println("Inserting polygon...");
s.execute(insertPolygonSQL);
System.out.println("Done.");
s = conn.createStatement();
System.out.println("Querying table...");
- ResultSet r = s.executeQuery("select * from " + dbtable);
+ ResultSet r = s.executeQuery("select asText(geom),id from " + dbtable);
while( r.next() )
{
Object obj = r.getObject(1);
int id = r.getInt(2);
System.out.println("Row " + id + ":");
System.out.println(obj.toString());
}
s.close();
conn.close();
}
catch( Exception e ) {
e.printStackTrace();
}
}
}
| false | true | public static void main(String[] args)
{
Connection conn;
String dbname = "tb";
String dbuser = "dblasby";
String dbpass = "";
String dbhost = "ox";
String dbport = "5555";
String dbtable = "jdbc_test";
String dropSQL = "drop table " + dbtable;
String createSQL = "create table " + dbtable + " (geom geometry, id int4)";
String insertPointSQL = "insert into " + dbtable + " values ('POINT (10 10 10)',1)";
String insertPolygonSQL = "insert into " + dbtable + " values ('POLYGON ((0 0 0,0 10 0,10 10 0,10 0 0,0 0 0))',2)";
try {
System.out.println("Creating JDBC connection...");
Class.forName("org.postgresql.Driver");
String url = "jdbc:postgresql://" + dbhost + ":" + dbport + "/" + dbname;
conn = DriverManager.getConnection(url, dbuser, dbpass);
System.out.println("Adding geometric type entries...");
((org.postgresql.Connection)conn).addDataType("geometry","org.postgis.PGgeometry");
((org.postgresql.Connection)conn).addDataType("box3d","org.postgis.PGbox3d");
Statement s = conn.createStatement();
System.out.println("Creating table with geometric types...");
s.execute(dropSQL);
s.execute(createSQL);
System.out.println("Inserting point...");
s.execute(insertPointSQL);
System.out.println("Inserting polygon...");
s.execute(insertPolygonSQL);
System.out.println("Done.");
s = conn.createStatement();
System.out.println("Querying table...");
ResultSet r = s.executeQuery("select * from " + dbtable);
while( r.next() )
{
Object obj = r.getObject(1);
int id = r.getInt(2);
System.out.println("Row " + id + ":");
System.out.println(obj.toString());
}
s.close();
conn.close();
}
catch( Exception e ) {
e.printStackTrace();
}
}
| public static void main(String[] args)
{
Connection conn;
String dbname = "tb";
String dbuser = "dblasby";
String dbpass = "";
String dbhost = "ox";
String dbport = "5555";
String dbtable = "jdbc_test";
String dropSQL = "drop table " + dbtable;
String createSQL = "create table " + dbtable + " (geom geometry, id int4)";
String insertPointSQL = "insert into " + dbtable + " values ('POINT (10 10 10)',1)";
String insertPolygonSQL = "insert into " + dbtable + " values ('POLYGON ((0 0 0,0 10 0,10 10 0,10 0 0,0 0 0))',2)";
try {
System.out.println("Creating JDBC connection...");
Class.forName("org.postgresql.Driver");
String url = "jdbc:postgresql://" + dbhost + ":" + dbport + "/" + dbname;
conn = DriverManager.getConnection(url, dbuser, dbpass);
System.out.println("Adding geometric type entries...");
((org.postgresql.Connection)conn).addDataType("geometry","org.postgis.PGgeometry");
((org.postgresql.Connection)conn).addDataType("box3d","org.postgis.PGbox3d");
Statement s = conn.createStatement();
System.out.println("Creating table with geometric types...");
//table might not yet exist
try {
s.execute(dropSQL);
} catch(Exception e) {
e.printStackTrace();
}
s.execute(createSQL);
System.out.println("Inserting point...");
s.execute(insertPointSQL);
System.out.println("Inserting polygon...");
s.execute(insertPolygonSQL);
System.out.println("Done.");
s = conn.createStatement();
System.out.println("Querying table...");
ResultSet r = s.executeQuery("select asText(geom),id from " + dbtable);
while( r.next() )
{
Object obj = r.getObject(1);
int id = r.getInt(2);
System.out.println("Row " + id + ":");
System.out.println(obj.toString());
}
s.close();
conn.close();
}
catch( Exception e ) {
e.printStackTrace();
}
}
|
diff --git a/src/it/chalmers/dat255_bearded_octo_lama/activities/AddAlarmActivity.java b/src/it/chalmers/dat255_bearded_octo_lama/activities/AddAlarmActivity.java
index d93c6ae..9af6a03 100644
--- a/src/it/chalmers/dat255_bearded_octo_lama/activities/AddAlarmActivity.java
+++ b/src/it/chalmers/dat255_bearded_octo_lama/activities/AddAlarmActivity.java
@@ -1,426 +1,427 @@
/**
* Copyright (C) 2012 Emil Edholm, Emil Johansson, Johan Andersson, Johan Gustafsson
*
* This file is part of dat255-bearded-octo-lama
*
* dat255-bearded-octo-lama is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* dat255-bearded-octo-lama is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with dat255-bearded-octo-lama. If not, see <http://www.gnu.org/licenses/>.
*
*/
package it.chalmers.dat255_bearded_octo_lama.activities;
import it.chalmers.dat255_bearded_octo_lama.Alarm;
import it.chalmers.dat255_bearded_octo_lama.AlarmController;
import it.chalmers.dat255_bearded_octo_lama.R;
import it.chalmers.dat255_bearded_octo_lama.R.array;
import it.chalmers.dat255_bearded_octo_lama.R.id;
import it.chalmers.dat255_bearded_octo_lama.R.layout;
import it.chalmers.dat255_bearded_octo_lama.games.GameManager;
import it.chalmers.dat255_bearded_octo_lama.utilities.Filter;
import it.chalmers.dat255_bearded_octo_lama.utilities.RingtoneFinder;
import it.chalmers.dat255_bearded_octo_lama.utilities.Time;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.List;
import android.content.res.TypedArray;
import android.media.Ringtone;
import android.net.Uri;
import android.os.Bundle;
import android.view.LayoutInflater;
import android.util.Log;
import android.view.Menu;
import android.view.MenuInflater;
import android.view.MenuItem;
import android.view.View;
import android.widget.AdapterView;
import android.widget.AdapterView.OnItemSelectedListener;
import android.widget.ArrayAdapter;
import android.widget.Button;
import android.widget.CheckBox;
import android.widget.Spinner;
import android.widget.TabHost;
import android.widget.TextView;
import android.widget.Toast;
/**
* Lets the user selects an alarm time and add it.
* @author Emil Edholm
* @date 27 sep 2012
* @modified by Emil Johansson
* @date 14 oct 2012
*/
public final class AddAlarmActivity extends AbstractActivity implements OnItemSelectedListener {
private Button currentTimeButton;
private final TimeFilter filter = new TimeFilter();
private boolean setAlarmAT = true; // if false, set alarm to an interval instead.
private List<Ringtone> tones = new ArrayList<Ringtone>();
private final List<Ringtone> selectedTones = new ArrayList<Ringtone>();
private final ArrayList<String> gamesList= new ArrayList<String>();
private final ArrayList<Integer> snoozeList= new ArrayList<Integer>();
private String choosenGame;
private CheckBox vibration, sound, games;
private int snoozeInterval;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(layout.activity_alarm_menu);
initTabs();
initSettings();
Spinner spinner = (Spinner)findViewById(id.time_options_spinner);
spinner.setOnItemSelectedListener(this);
// Set to the first (hour 0) button.
selectTimeButton(id.h0);
}
private void initSettings() {
//Checkboxes for turning on/off sound, vibration and games
vibration = (CheckBox)findViewById(id.vibration);
sound = (CheckBox)findViewById(id.sound);
games = (CheckBox)findViewById(id.games);
//Get all spinner views from the xml.
Spinner soundSpinner = (Spinner)findViewById(id.sound_list_spinner);
Spinner gameSpinner = (Spinner)findViewById(id.games_list_spinner);
Spinner snoozeSpinner = (Spinner)findViewById(id.snooze_list_spinner);
// Init the sound spinner.
ArrayList<String> songs = new ArrayList<String>();
tones = RingtoneFinder.getRingtones(this);
for(Ringtone r:tones){
songs.add(r.getTitle(getBaseContext()));
}
soundSpinner.setAdapter(new ArrayAdapter<String>(this, android.R.layout.simple_spinner_item, songs));
soundSpinner.setOnItemSelectedListener(new SoundSpinnerListener());
//Init the game spinner.
String[] tempGamesString = GameManager.getAvailableGamesStrings();
for(int i=0; i < tempGamesString.length; i++){
gamesList.add(tempGamesString[i]);
}
gameSpinner.setAdapter(new ArrayAdapter<String>(this, android.R.layout.simple_spinner_item, gamesList));
gameSpinner.setOnItemSelectedListener(new GameSpinnerListener());
//Init the snooze interval spinner.
for(int i = 1; i <= 10; i++){
snoozeList.add(i);
}
snoozeSpinner.setAdapter(new ArrayAdapter<Integer>(this, android.R.layout.simple_spinner_item, snoozeList));
snoozeSpinner.setOnItemSelectedListener(new SnoozeSpinnerListener());
}
/**
* This method will create customized tab views overriding the old bland android tab view.
*/
private void initTabs() {
TabHost tabs = (TabHost) findViewById(R.id.tabhost);
tabs.setup();
TabHost.TabSpec spec;
//Setup first tab with specified text.
spec = tabs.newTabSpec("tag1");
spec.setContent(R.id.tab1);
createTabView(spec, "Add alarm");
tabs.addTab(spec);
//Setup second tab with specified text.
spec = tabs.newTabSpec("tag2");
spec.setContent(R.id.tab2);
createTabView(spec, "Settings");
tabs.addTab(spec);
}
/**
* Helper method for creating tabs with unique text
*/
private View createTabView(TabHost.TabSpec spec, String text) {
View view = LayoutInflater.from(this).inflate(R.layout.tabs_layout, null);
TextView tabText = (TextView) view.findViewById(R.id.tabText);
tabText.setText(text);
spec.setIndicator(view);
return view;
}
/**
* Retrieves the time from the "time" buttons.
* @return returns an integer array with four values representing the time in hh:mm format where {@code h0 = int[0]; h1 = int[1]} etc.
*/
private int[] queryTimeValues() {
int numberOfBtns = 4;
int[] time = new int[numberOfBtns];
time[0] = getButtonNumber(R.id.h0);
time[1] = getButtonNumber(R.id.h1);
time[2] = getButtonNumber(R.id.m0);
time[3] = getButtonNumber(R.id.m1);
return time;
}
/** Select a specific time button based on ID */
private void selectTimeButton(int id) {
if(currentTimeButton != null) {
currentTimeButton.setBackgroundColor(getResources().getColor(R.color.white));
}
currentTimeButton = (Button)findViewById(id);
filter.setTimeButtonId(id);
currentTimeButton.setBackgroundColor(getResources().getColor(R.color.green));
}
/** Selects the next "time" button */
private void selectNextTimeButton() {
switch(currentTimeButton.getId()) {
case R.id.h0:
selectTimeButton(id.h1);
break;
case R.id.h1:
selectTimeButton(id.m0);
break;
case R.id.m0:
selectTimeButton(id.m1);
break;
default:
selectTimeButton(id.h0);
break;
}
}
/** When the user clicks the add button (ie. when he is finished) */
private void addAlarm() {
int[] time = queryTimeValues();
int timeBase = 10;
int hour, minute;
// Combine values from format hh:mm to h:m.
if(setAlarmAT) {
hour = time[0] * timeBase + time[1];
minute = time[2] * timeBase + time[3];
}
else {
hour = time[0] * timeBase + time[1];
minute = time[2] * timeBase + time[3];
// Time now + value selected equals sometime in the future.
Calendar cal = Calendar.getInstance();
cal.add(Calendar.HOUR_OF_DAY, hour);
cal.add(Calendar.MINUTE, minute);
hour = cal.get(Calendar.HOUR_OF_DAY);
minute = cal.get(Calendar.MINUTE);
}
AlarmController ac = AlarmController.INSTANCE;
// Defines the options for the alarm.
Alarm.Extras extras = new Alarm.Extras.Builder()
.useVibration(vibration.isChecked())
.useSound(sound.isChecked())
.gameNotification(games.isChecked())
.gameName(choosenGame)
.snoozeInterval(snoozeInterval)
+ .addRingtoneIDs(RingtoneFinder.findRingtoneID(this, selectedTones))
.build();
Uri uri = ac.addAlarm(getApplicationContext(), true, hour, minute, extras);
Alarm a = ac.getAlarm(this, ac.extractIDFromUri(uri));
Toast.makeText(getApplicationContext(), "Alarm added at " + hour + ":" + minute + ". Time left: " + Time.getTimeLeft(a.getTimeInMS()), Toast.LENGTH_SHORT).show();
finish();
}
/**
* Event handler for when one of the buttons on the numbpad is clicked.
* @param view the button that was clicked.
*/
public void onNumpadClick(View view) {
// Check if allowed number and if so, select next time button
int numClicked = getButtonNumber((Button)view);
int h1 = getButtonNumber(id.h1);
if(filter.accept(numClicked)) {
currentTimeButton.setText(numClicked + "");
if(currentTimeButton.getId() == R.id.h0 && numClicked == 2 && h1 > 3) {
Button b = (Button) findViewById(R.id.h1);
b.setText("0");
}
selectNextTimeButton();
}
}
/**
* @return number on the button
*/
private int getButtonNumber(Button button) {
return Integer.parseInt(button.getText().toString());
}
/**
* @return the number on the button, -1 if not a button or error occurred.
*/
private int getButtonNumber(int id) {
View v = findViewById(id);
if(v instanceof Button) {
return getButtonNumber((Button)v);
}
return -1;
}
/**
* Event handler for when one of the buttons indicating the time is clicked.
* @param view the button that was clicked.
*/
public void onTimeClick(View view) {
selectTimeButton(view.getId());
}
/** What happens when an item is selected on the options spinner */
public void onItemSelected(AdapterView<?> parent, View view,
int pos, long id) {
String option = String.valueOf(parent.getItemAtPosition(pos));
TypedArray options = getResources().obtainTypedArray(array.time_options_array);
// Assumes a static position of the options value and "Alarm at" at position 0 and "Alarm in" at pos 1.
// There is probably a better way to do this...
setAlarmAT = option.equals(options.getString(0));
}
public void onNothingSelected(AdapterView<?> parent) { /* Do nothing */ }
@Override
public boolean onOptionsItemSelected(MenuItem item) {
switch (item.getItemId()) {
case R.id.menu_add:
addAlarm();
return true;
default:
return super.onOptionsItemSelected(item);
}
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
MenuInflater inflater = getMenuInflater();
inflater.inflate(R.menu.activity_add_alarm, menu);
return true;
}
private static class TimeFilter implements Filter<Integer> {
private int selectedTimeButtonId, h0;
public boolean accept(Integer i) {
switch(selectedTimeButtonId) {
case id.h0:
h0 = i;
return i <= 2;
case id.h1:
return i <= 3 || h0 != 2;
case id.m0:
return i <= 5;
case id.m1:
return i <= 9;
}
return false;
}
public void setTimeButtonId(int id) {
selectedTimeButtonId = id;
}
}
public void addTestAlarm(View v) {
AlarmController ac = AlarmController.INSTANCE;
int countdown = 5;
Calendar cal = Calendar.getInstance();
cal.add(Calendar.SECOND, countdown);
// Defines the options for the test alarm.
Alarm.Extras extras = new Alarm.Extras.Builder()
.useVibration(vibration.isChecked())
.useSound(sound.isChecked())
.gameNotification(games.isChecked())
.gameName(choosenGame)
.snoozeInterval(snoozeInterval)
.addRingtoneIDs(RingtoneFinder.findRingtoneID(this, selectedTones))
.build();
ac.addAlarm(this, true, cal.getTimeInMillis(), extras);
Toast.makeText(getApplicationContext(), "Alarm added 5 seconds from now", Toast.LENGTH_SHORT).show();
finish();
}
/**
* Private class for listening to the Spinner in settings that chooses which sound to play
* @author e
*
*/
private class SoundSpinnerListener implements OnItemSelectedListener {
public void onItemSelected(AdapterView<?> parent, View view, int pos,
long id) {
selectedTones.clear();
Log.d("AddAlarmActivity", tones.get(pos).getTitle(getApplicationContext()));
selectedTones.add(tones.get(pos));
}
public void onNothingSelected(AdapterView<?> parent) {
// Do Nothing
}
}
/**
* Private class for listening to the Spinner in settings that chooses which game to play
* @author e
*
*/
private class GameSpinnerListener implements OnItemSelectedListener {
public void onItemSelected(AdapterView<?> parent, View view, int pos,
long id) {
choosenGame = gamesList.get(pos);
}
public void onNothingSelected(AdapterView<?> parent) {
// Do Nothing
}
}
/**
* Private class for listening to the Spinner in settings that chooses which game to play
* @author e
*
*/
private class SnoozeSpinnerListener implements OnItemSelectedListener {
public void onItemSelected(AdapterView<?> parent, View view, int pos,
long id) {
snoozeInterval = snoozeList.get(pos);
}
public void onNothingSelected(AdapterView<?> parent) {
// Do Nothing
}
}
}
| true | true | private void addAlarm() {
int[] time = queryTimeValues();
int timeBase = 10;
int hour, minute;
// Combine values from format hh:mm to h:m.
if(setAlarmAT) {
hour = time[0] * timeBase + time[1];
minute = time[2] * timeBase + time[3];
}
else {
hour = time[0] * timeBase + time[1];
minute = time[2] * timeBase + time[3];
// Time now + value selected equals sometime in the future.
Calendar cal = Calendar.getInstance();
cal.add(Calendar.HOUR_OF_DAY, hour);
cal.add(Calendar.MINUTE, minute);
hour = cal.get(Calendar.HOUR_OF_DAY);
minute = cal.get(Calendar.MINUTE);
}
AlarmController ac = AlarmController.INSTANCE;
// Defines the options for the alarm.
Alarm.Extras extras = new Alarm.Extras.Builder()
.useVibration(vibration.isChecked())
.useSound(sound.isChecked())
.gameNotification(games.isChecked())
.gameName(choosenGame)
.snoozeInterval(snoozeInterval)
.build();
Uri uri = ac.addAlarm(getApplicationContext(), true, hour, minute, extras);
Alarm a = ac.getAlarm(this, ac.extractIDFromUri(uri));
Toast.makeText(getApplicationContext(), "Alarm added at " + hour + ":" + minute + ". Time left: " + Time.getTimeLeft(a.getTimeInMS()), Toast.LENGTH_SHORT).show();
finish();
}
| private void addAlarm() {
int[] time = queryTimeValues();
int timeBase = 10;
int hour, minute;
// Combine values from format hh:mm to h:m.
if(setAlarmAT) {
hour = time[0] * timeBase + time[1];
minute = time[2] * timeBase + time[3];
}
else {
hour = time[0] * timeBase + time[1];
minute = time[2] * timeBase + time[3];
// Time now + value selected equals sometime in the future.
Calendar cal = Calendar.getInstance();
cal.add(Calendar.HOUR_OF_DAY, hour);
cal.add(Calendar.MINUTE, minute);
hour = cal.get(Calendar.HOUR_OF_DAY);
minute = cal.get(Calendar.MINUTE);
}
AlarmController ac = AlarmController.INSTANCE;
// Defines the options for the alarm.
Alarm.Extras extras = new Alarm.Extras.Builder()
.useVibration(vibration.isChecked())
.useSound(sound.isChecked())
.gameNotification(games.isChecked())
.gameName(choosenGame)
.snoozeInterval(snoozeInterval)
.addRingtoneIDs(RingtoneFinder.findRingtoneID(this, selectedTones))
.build();
Uri uri = ac.addAlarm(getApplicationContext(), true, hour, minute, extras);
Alarm a = ac.getAlarm(this, ac.extractIDFromUri(uri));
Toast.makeText(getApplicationContext(), "Alarm added at " + hour + ":" + minute + ". Time left: " + Time.getTimeLeft(a.getTimeInMS()), Toast.LENGTH_SHORT).show();
finish();
}
|
diff --git a/stripes/src/net/sourceforge/stripes/action/ForwardResolution.java b/stripes/src/net/sourceforge/stripes/action/ForwardResolution.java
index 4ccf25f..2789389 100644
--- a/stripes/src/net/sourceforge/stripes/action/ForwardResolution.java
+++ b/stripes/src/net/sourceforge/stripes/action/ForwardResolution.java
@@ -1,78 +1,78 @@
/* Copyright 2005-2006 Tim Fennell
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.sourceforge.stripes.action;
import net.sourceforge.stripes.util.Log;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.servlet.ServletException;
import java.io.IOException;
/**
* Resolution that uses the Servlet API to <em>forward</em> the user to another path within the
* same web application using a server side forward.
*
* @see RedirectResolution
* @author Tim Fennell
*/
public class ForwardResolution extends OnwardResolution<ForwardResolution> implements Resolution {
private static final Log log = Log.getInstance(ForwardResolution.class);
/**
* Simple constructor that takes in the path to forward the user to.
* @param path the path within the web application that the user should be forwarded to
*/
public ForwardResolution(String path) {
super(path);
}
/**
* Constructs a ForwardResolution that will forward to the URL appropriate for
* the ActionBean supplied. This constructor should be preferred when forwarding
* to an ActionBean as it will ensure the correct URL is always used.
*
* @param beanType the Class object representing the ActionBean to redirect to
*/
public ForwardResolution(Class<? extends ActionBean> beanType) {
super(beanType);
}
/**
* Constructs a ForwardResolution that will forward to the URL appropriate for
* the ActionBean supplied. This constructor should be preferred when forwarding
* to an ActionBean as it will ensure the correct URL is always used.
*
* @param beanType the Class object representing the ActionBean to redirect to
* @param event the event that should be triggered on the redirect
*/
public ForwardResolution(Class<? extends ActionBean> beanType, String event) {
super(beanType, event);
}
/**
* Attempts to forward the user to the specified path.
* @throws ServletException thrown when the Servlet container encounters an error
* @throws IOException thrown when the Servlet container encounters an error
*/
public void execute(HttpServletRequest request, HttpServletResponse response)
throws ServletException, IOException {
- String path = getPath();
+ String path = getUrl();
log.trace("Forwarding to path: ", path);
request.getRequestDispatcher(path).forward(request, response);
}
}
| true | true | public void execute(HttpServletRequest request, HttpServletResponse response)
throws ServletException, IOException {
String path = getPath();
log.trace("Forwarding to path: ", path);
request.getRequestDispatcher(path).forward(request, response);
}
| public void execute(HttpServletRequest request, HttpServletResponse response)
throws ServletException, IOException {
String path = getUrl();
log.trace("Forwarding to path: ", path);
request.getRequestDispatcher(path).forward(request, response);
}
|
diff --git a/main/src/com/google/refine/expr/functions/ToNumber.java b/main/src/com/google/refine/expr/functions/ToNumber.java
index 82ebaafe..b0cdfb04 100644
--- a/main/src/com/google/refine/expr/functions/ToNumber.java
+++ b/main/src/com/google/refine/expr/functions/ToNumber.java
@@ -1,74 +1,80 @@
/*
Copyright 2010, Google Inc.
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the following disclaimer
in the documentation and/or other materials provided with the
distribution.
* Neither the name of Google Inc. nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package com.google.refine.expr.functions;
import java.util.Properties;
import org.json.JSONException;
import org.json.JSONWriter;
import com.google.refine.expr.EvalError;
import com.google.refine.grel.Function;
public class ToNumber implements Function {
@Override
public Object call(Properties bindings, Object[] args) {
if (args.length == 1 && args[0] != null) {
if (args[0] instanceof Number) {
return args[0];
} else {
- String s = args[0].toString();
- try {
- return Double.parseDouble(s);
- } catch (NumberFormatException e) {
- return new EvalError("Cannot parse to number");
+ String s = args[0].toString().trim();
+ if (s.length() > 0) {
+ try {
+ return Long.parseLong(s);
+ } catch (NumberFormatException e) {
+ }
+ try {
+ return Double.parseDouble(s);
+ } catch (NumberFormatException e) {
+ return new EvalError("Cannot parse to number");
+ }
}
}
}
return null;
}
@Override
public void write(JSONWriter writer, Properties options)
throws JSONException {
writer.object();
writer.key("description"); writer.value("Returns o converted to a number");
writer.key("params"); writer.value("o");
writer.key("returns"); writer.value("number");
writer.endObject();
}
}
| true | true | public Object call(Properties bindings, Object[] args) {
if (args.length == 1 && args[0] != null) {
if (args[0] instanceof Number) {
return args[0];
} else {
String s = args[0].toString();
try {
return Double.parseDouble(s);
} catch (NumberFormatException e) {
return new EvalError("Cannot parse to number");
}
}
}
return null;
}
| public Object call(Properties bindings, Object[] args) {
if (args.length == 1 && args[0] != null) {
if (args[0] instanceof Number) {
return args[0];
} else {
String s = args[0].toString().trim();
if (s.length() > 0) {
try {
return Long.parseLong(s);
} catch (NumberFormatException e) {
}
try {
return Double.parseDouble(s);
} catch (NumberFormatException e) {
return new EvalError("Cannot parse to number");
}
}
}
}
return null;
}
|
diff --git a/com.ibm.wala.core/src/com/ibm/wala/ipa/slicer/SliceFunctions.java b/com.ibm.wala.core/src/com/ibm/wala/ipa/slicer/SliceFunctions.java
index 970dc21f0..c96359c88 100644
--- a/com.ibm.wala.core/src/com/ibm/wala/ipa/slicer/SliceFunctions.java
+++ b/com.ibm.wala.core/src/com/ibm/wala/ipa/slicer/SliceFunctions.java
@@ -1,78 +1,79 @@
/*******************************************************************************
* Copyright (c) 2006 IBM Corporation.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* IBM Corporation - initial API and implementation
*******************************************************************************/
package com.ibm.wala.ipa.slicer;
import com.ibm.wala.dataflow.IFDS.IFlowFunction;
import com.ibm.wala.dataflow.IFDS.IFlowFunctionMap;
import com.ibm.wala.dataflow.IFDS.IUnaryFlowFunction;
import com.ibm.wala.dataflow.IFDS.IdentityFlowFunction;
import com.ibm.wala.util.debug.Assertions;
/**
* flow functions for FSCS slicer
*
* @author sjfink
*
*/
public class SliceFunctions implements IFlowFunctionMap<Statement> {
public IUnaryFlowFunction getCallFlowFunction(Statement src, Statement dest) {
return ReachabilityFunctions.singleton().getCallFlowFunction(src, dest);
}
public IUnaryFlowFunction getCallNoneToReturnFlowFunction(Statement src, Statement dest) {
if (src == null) {
throw new IllegalArgumentException("src is null");
}
Statement s = src;
switch (s.getKind()) {
case NORMAL_RET_CALLER:
case PARAM_CALLER:
+ case EXC_RET_CALLER:
// uh oh. anything that flows into the missing function will be killed.
return ReachabilityFunctions.killReachability;
case HEAP_PARAM_CALLEE:
case HEAP_PARAM_CALLER:
case HEAP_RET_CALLEE:
case HEAP_RET_CALLER:
if (dest instanceof HeapStatement) {
HeapStatement hd = (HeapStatement)dest;
HeapStatement hs = (HeapStatement)src;
if (hs.getLocation().equals(hd.getLocation())) {
return IdentityFlowFunction.identity();
} else {
return ReachabilityFunctions.killReachability;
}
} else {
return ReachabilityFunctions.killReachability;
}
case NORMAL:
// only control dependence flows into the missing function.
// this control dependence does not flow back to the caller.
return ReachabilityFunctions.killReachability;
default:
Assertions.UNREACHABLE(s.getKind().toString());
return null;
}
}
public IUnaryFlowFunction getCallToReturnFlowFunction(Statement src, Statement dest) {
return ReachabilityFunctions.singleton().getCallToReturnFlowFunction(src, dest);
}
public IUnaryFlowFunction getNormalFlowFunction(Statement src, Statement dest) {
return ReachabilityFunctions.singleton().getNormalFlowFunction(src, dest);
}
public IFlowFunction getReturnFlowFunction(Statement call, Statement src, Statement dest) {
return ReachabilityFunctions.singleton().getReturnFlowFunction(call, src, dest);
}
}
| true | true | public IUnaryFlowFunction getCallNoneToReturnFlowFunction(Statement src, Statement dest) {
if (src == null) {
throw new IllegalArgumentException("src is null");
}
Statement s = src;
switch (s.getKind()) {
case NORMAL_RET_CALLER:
case PARAM_CALLER:
// uh oh. anything that flows into the missing function will be killed.
return ReachabilityFunctions.killReachability;
case HEAP_PARAM_CALLEE:
case HEAP_PARAM_CALLER:
case HEAP_RET_CALLEE:
case HEAP_RET_CALLER:
if (dest instanceof HeapStatement) {
HeapStatement hd = (HeapStatement)dest;
HeapStatement hs = (HeapStatement)src;
if (hs.getLocation().equals(hd.getLocation())) {
return IdentityFlowFunction.identity();
} else {
return ReachabilityFunctions.killReachability;
}
} else {
return ReachabilityFunctions.killReachability;
}
case NORMAL:
// only control dependence flows into the missing function.
// this control dependence does not flow back to the caller.
return ReachabilityFunctions.killReachability;
default:
Assertions.UNREACHABLE(s.getKind().toString());
return null;
}
}
| public IUnaryFlowFunction getCallNoneToReturnFlowFunction(Statement src, Statement dest) {
if (src == null) {
throw new IllegalArgumentException("src is null");
}
Statement s = src;
switch (s.getKind()) {
case NORMAL_RET_CALLER:
case PARAM_CALLER:
case EXC_RET_CALLER:
// uh oh. anything that flows into the missing function will be killed.
return ReachabilityFunctions.killReachability;
case HEAP_PARAM_CALLEE:
case HEAP_PARAM_CALLER:
case HEAP_RET_CALLEE:
case HEAP_RET_CALLER:
if (dest instanceof HeapStatement) {
HeapStatement hd = (HeapStatement)dest;
HeapStatement hs = (HeapStatement)src;
if (hs.getLocation().equals(hd.getLocation())) {
return IdentityFlowFunction.identity();
} else {
return ReachabilityFunctions.killReachability;
}
} else {
return ReachabilityFunctions.killReachability;
}
case NORMAL:
// only control dependence flows into the missing function.
// this control dependence does not flow back to the caller.
return ReachabilityFunctions.killReachability;
default:
Assertions.UNREACHABLE(s.getKind().toString());
return null;
}
}
|
diff --git a/org.eclipse.jdt.debug.ui/ui/org/eclipse/jdt/debug/ui/launchConfigurations/JavaClasspathTab.java b/org.eclipse.jdt.debug.ui/ui/org/eclipse/jdt/debug/ui/launchConfigurations/JavaClasspathTab.java
index 4b1111934..687791f00 100644
--- a/org.eclipse.jdt.debug.ui/ui/org/eclipse/jdt/debug/ui/launchConfigurations/JavaClasspathTab.java
+++ b/org.eclipse.jdt.debug.ui/ui/org/eclipse/jdt/debug/ui/launchConfigurations/JavaClasspathTab.java
@@ -1,466 +1,466 @@
/*******************************************************************************
* Copyright (c) 2000, 2005 IBM Corporation and others.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* IBM Corporation - initial API and implementation
* BEA - Daniel R Somerfield - Bug 88939
*******************************************************************************/
package org.eclipse.jdt.debug.ui.launchConfigurations;
import java.text.MessageFormat;
import java.util.ArrayList;
import java.util.List;
import org.eclipse.core.resources.IProject;
import org.eclipse.core.resources.IResource;
import org.eclipse.core.resources.IWorkspace;
import org.eclipse.core.resources.ResourcesPlugin;
import org.eclipse.core.runtime.CoreException;
import org.eclipse.core.runtime.IStatus;
import org.eclipse.debug.core.ILaunchConfiguration;
import org.eclipse.debug.core.ILaunchConfigurationWorkingCopy;
import org.eclipse.jdt.internal.debug.ui.IJavaDebugHelpContextIds;
import org.eclipse.jdt.internal.debug.ui.JDIDebugUIPlugin;
import org.eclipse.jdt.internal.debug.ui.JavaDebugImages;
import org.eclipse.jdt.internal.debug.ui.actions.AddAdvancedAction;
import org.eclipse.jdt.internal.debug.ui.actions.AddExternalFolderAction;
import org.eclipse.jdt.internal.debug.ui.actions.AddExternalJarAction;
import org.eclipse.jdt.internal.debug.ui.actions.AddFolderAction;
import org.eclipse.jdt.internal.debug.ui.actions.AddJarAction;
import org.eclipse.jdt.internal.debug.ui.actions.AddLibraryAction;
import org.eclipse.jdt.internal.debug.ui.actions.AddProjectAction;
import org.eclipse.jdt.internal.debug.ui.actions.AddVariableAction;
import org.eclipse.jdt.internal.debug.ui.actions.AttachSourceAction;
import org.eclipse.jdt.internal.debug.ui.actions.MoveDownAction;
import org.eclipse.jdt.internal.debug.ui.actions.MoveUpAction;
import org.eclipse.jdt.internal.debug.ui.actions.RemoveAction;
import org.eclipse.jdt.internal.debug.ui.actions.RestoreDefaultEntriesAction;
import org.eclipse.jdt.internal.debug.ui.actions.RuntimeClasspathAction;
import org.eclipse.jdt.internal.debug.ui.classpath.BootpathFilter;
import org.eclipse.jdt.internal.debug.ui.classpath.ClasspathContentProvider;
import org.eclipse.jdt.internal.debug.ui.classpath.ClasspathEntry;
import org.eclipse.jdt.internal.debug.ui.classpath.ClasspathLabelProvider;
import org.eclipse.jdt.internal.debug.ui.classpath.ClasspathModel;
import org.eclipse.jdt.internal.debug.ui.classpath.IClasspathEntry;
import org.eclipse.jdt.internal.debug.ui.classpath.RuntimeClasspathViewer;
import org.eclipse.jdt.internal.debug.ui.launcher.JavaLaunchConfigurationTab;
import org.eclipse.jdt.internal.debug.ui.launcher.LauncherMessages;
import org.eclipse.jdt.launching.IJavaLaunchConfigurationConstants;
import org.eclipse.jdt.launching.IRuntimeClasspathEntry;
import org.eclipse.jdt.launching.JavaRuntime;
import org.eclipse.jface.action.IAction;
import org.eclipse.swt.SWT;
import org.eclipse.swt.graphics.Font;
import org.eclipse.swt.graphics.Image;
import org.eclipse.swt.layout.GridData;
import org.eclipse.swt.layout.GridLayout;
import org.eclipse.swt.widgets.Button;
import org.eclipse.swt.widgets.Composite;
import org.eclipse.swt.widgets.Label;
import org.eclipse.ui.PlatformUI;
/**
* A launch configuration tab that displays and edits the user and
* bootstrap classes comprising the classpath launch configuration
* attribute.
* <p>
* This class may be instantiated. This class is not intended to be subclassed.
* </p>
* @since 2.0
*/
public class JavaClasspathTab extends JavaLaunchConfigurationTab {
protected RuntimeClasspathViewer fClasspathViewer;
private ClasspathModel fModel;
protected static final String DIALOG_SETTINGS_PREFIX = "JavaClasspathTab"; //$NON-NLS-1$
/**
* The last launch config this tab was initialized from
*/
protected ILaunchConfiguration fLaunchConfiguration;
/* (non-Javadoc)
* @see org.eclipse.debug.ui.ILaunchConfigurationTab#createControl(org.eclipse.swt.widgets.Composite)
*/
public void createControl(Composite parent) {
Font font = parent.getFont();
Composite comp = new Composite(parent, SWT.NONE);
setControl(comp);
PlatformUI.getWorkbench().getHelpSystem().setHelp(getControl(), IJavaDebugHelpContextIds.LAUNCH_CONFIGURATION_DIALOG_CLASSPATH_TAB);
GridLayout topLayout = new GridLayout();
topLayout.numColumns = 2;
comp.setLayout(topLayout);
GridData gd;
Label label = new Label(comp, SWT.NONE);
label.setText(LauncherMessages.JavaClasspathTab_0); //$NON-NLS-1$
gd = new GridData(GridData.HORIZONTAL_ALIGN_BEGINNING);
gd.horizontalSpan = 2;
label.setLayoutData(gd);
fClasspathViewer = new RuntimeClasspathViewer(comp);
fClasspathViewer.addEntriesChangedListener(this);
fClasspathViewer.getControl().setFont(font);
fClasspathViewer.setLabelProvider(new ClasspathLabelProvider());
fClasspathViewer.setContentProvider(new ClasspathContentProvider(this));
if (!isShowBootpath()) {
fClasspathViewer.addFilter(new BootpathFilter());
}
Composite pathButtonComp = new Composite(comp, SWT.NONE);
GridLayout pathButtonLayout = new GridLayout();
pathButtonLayout.marginHeight = 0;
pathButtonLayout.marginWidth = 0;
pathButtonComp.setLayout(pathButtonLayout);
gd = new GridData(GridData.VERTICAL_ALIGN_BEGINNING | GridData.HORIZONTAL_ALIGN_FILL);
pathButtonComp.setLayoutData(gd);
pathButtonComp.setFont(font);
createPathButtons(pathButtonComp);
}
/**
* Creates the buttons to manipulate the classpath.
*
* @param pathButtonComp composite buttons are contained in
* @since 3.0
*/
protected void createPathButtons(Composite pathButtonComp) {
List advancedActions = new ArrayList(5);
createButton(pathButtonComp, new MoveUpAction(fClasspathViewer));
createButton(pathButtonComp, new MoveDownAction(fClasspathViewer));
createButton(pathButtonComp, new RemoveAction(fClasspathViewer));
createButton(pathButtonComp, new AddProjectAction(fClasspathViewer));
createButton(pathButtonComp, new AddJarAction(fClasspathViewer));
createButton(pathButtonComp, new AddExternalJarAction(fClasspathViewer, DIALOG_SETTINGS_PREFIX));
RuntimeClasspathAction action = new AddFolderAction(null);
advancedActions.add(action);
action = new AddExternalFolderAction(null, DIALOG_SETTINGS_PREFIX);
advancedActions.add(action);
action = new AddVariableAction(null);
advancedActions.add(action);
action = new AddLibraryAction(null);
advancedActions.add(action);
action = new AttachSourceAction(null, SWT.RADIO);
advancedActions.add(action);
IAction[] adv = (IAction[])advancedActions.toArray(new IAction[advancedActions.size()]);
createButton(pathButtonComp, new AddAdvancedAction(fClasspathViewer, adv));
action= new RestoreDefaultEntriesAction(fClasspathViewer, this);
createButton(pathButtonComp, action);
action.setEnabled(true);
}
/**
* Creates a button for the given action.
*
* @param pathButtonComp parent composite for the button
* @param action the action triggered by the button
* @return the button that was created
*/
protected Button createButton(Composite pathButtonComp, RuntimeClasspathAction action) {
Button button = createPushButton(pathButtonComp, action.getText(), null);
action.setButton(button);
return button;
}
/* (non-Javadoc)
* @see org.eclipse.debug.ui.ILaunchConfigurationTab#setDefaults(org.eclipse.debug.core.ILaunchConfigurationWorkingCopy)
*/
public void setDefaults(ILaunchConfigurationWorkingCopy configuration) {
}
/* (non-Javadoc)
* @see org.eclipse.debug.ui.ILaunchConfigurationTab#initializeFrom(org.eclipse.debug.core.ILaunchConfiguration)
*/
public void initializeFrom(ILaunchConfiguration configuration) {
refresh(configuration);
fClasspathViewer.expandToLevel(2);
}
/* (non-Javadoc)
* @see org.eclipse.debug.ui.ILaunchConfigurationTab#activated(org.eclipse.debug.core.ILaunchConfigurationWorkingCopy)
*/
public void activated(ILaunchConfigurationWorkingCopy workingCopy) {
try {
boolean useDefault= workingCopy.getAttribute(IJavaLaunchConfigurationConstants.ATTR_DEFAULT_CLASSPATH, true);
if (useDefault) {
if (!isDefaultClasspath(getCurrentClasspath(), workingCopy)) {
initializeFrom(workingCopy);
return;
}
}
fClasspathViewer.refresh();
} catch (CoreException e) {
}
}
/**
* Refreshes the classpath entries based on the current state of the given
* launch configuration.
*/
private void refresh(ILaunchConfiguration configuration) {
boolean useDefault = true;
setErrorMessage(null);
try {
useDefault = configuration.getAttribute(IJavaLaunchConfigurationConstants.ATTR_DEFAULT_CLASSPATH, true);
} catch (CoreException e) {
JDIDebugUIPlugin.log(e);
}
if (configuration == getLaunchConfiguration()) {
// no need to update if an explicit path is being used and this setting
// has not changed (and viewing the same config as last time)
if (!useDefault) {
setDirty(false);
return;
}
}
setLaunchConfiguration(configuration);
try {
createClasspathModel(configuration);
} catch (CoreException e) {
setErrorMessage(e.getMessage());
}
fClasspathViewer.setLaunchConfiguration(configuration);
fClasspathViewer.setInput(fModel);
setDirty(false);
}
private void createClasspathModel(ILaunchConfiguration configuration) throws CoreException {
fModel= new ClasspathModel();
IRuntimeClasspathEntry[] entries= JavaRuntime.computeUnresolvedRuntimeClasspath(configuration);
IRuntimeClasspathEntry entry;
for (int i = 0; i < entries.length; i++) {
entry= entries[i];
switch (entry.getClasspathProperty()) {
case IRuntimeClasspathEntry.USER_CLASSES:
fModel.addEntry(ClasspathModel.USER, entry);
break;
default:
fModel.addEntry(ClasspathModel.BOOTSTRAP, entry);
break;
}
}
}
/* (non-Javadoc)
* @see org.eclipse.debug.ui.ILaunchConfigurationTab#performApply(org.eclipse.debug.core.ILaunchConfigurationWorkingCopy)
*/
public void performApply(ILaunchConfigurationWorkingCopy configuration) {
if (isDirty()) {
IRuntimeClasspathEntry[] classpath = getCurrentClasspath();
boolean def = isDefaultClasspath(classpath, configuration.getOriginal());
if (def) {
configuration.setAttribute(IJavaLaunchConfigurationConstants.ATTR_DEFAULT_CLASSPATH, (String)null);
configuration.setAttribute(IJavaLaunchConfigurationConstants.ATTR_CLASSPATH, (String)null);
} else {
configuration.setAttribute(IJavaLaunchConfigurationConstants.ATTR_DEFAULT_CLASSPATH, false);
try {
List mementos = new ArrayList(classpath.length);
for (int i = 0; i < classpath.length; i++) {
IRuntimeClasspathEntry entry = classpath[i];
mementos.add(entry.getMemento());
}
configuration.setAttribute(IJavaLaunchConfigurationConstants.ATTR_CLASSPATH, mementos);
} catch (CoreException e) {
JDIDebugUIPlugin.errorDialog(LauncherMessages.JavaClasspathTab_Unable_to_save_classpath_1, e); //$NON-NLS-1$
}
}
}
}
/**
* Returns the classpath entries currently specified by this tab.
*
* @return the classpath entries currently specified by this tab
*/
private IRuntimeClasspathEntry[] getCurrentClasspath() {
IClasspathEntry[] boot = fModel.getEntries(ClasspathModel.BOOTSTRAP);
IClasspathEntry[] user = fModel.getEntries(ClasspathModel.USER);
List entries = new ArrayList(boot.length + user.length);
IClasspathEntry bootEntry;
IRuntimeClasspathEntry entry;
for (int i = 0; i < boot.length; i++) {
bootEntry= boot[i];
entry = null;
if (bootEntry instanceof ClasspathEntry) {
entry = ((ClasspathEntry)bootEntry).getDelegate();
} else if (bootEntry instanceof IRuntimeClasspathEntry) {
entry= (IRuntimeClasspathEntry) boot[i];
}
if (entry != null) {
if (entry.getClasspathProperty() == IRuntimeClasspathEntry.USER_CLASSES) {
entry.setClasspathProperty(IRuntimeClasspathEntry.BOOTSTRAP_CLASSES);
}
entries.add(entry);
}
}
IClasspathEntry userEntry;
for (int i = 0; i < user.length; i++) {
userEntry= user[i];
entry = null;
if (userEntry instanceof ClasspathEntry) {
entry = ((ClasspathEntry)userEntry).getDelegate();
} else if (userEntry instanceof IRuntimeClasspathEntry) {
entry= (IRuntimeClasspathEntry) user[i];
}
if (entry != null) {
entry.setClasspathProperty(IRuntimeClasspathEntry.USER_CLASSES);
entries.add(entry);
}
}
return (IRuntimeClasspathEntry[]) entries.toArray(new IRuntimeClasspathEntry[entries.size()]);
}
/**
* Returns whether the specified classpath is equivalent to the
* default classpath for this configuration.
*
* @param classpath classpath to compare to default
* @param configuration original configuration
* @return whether the specified classpath is equivalent to the
* default classpath for this configuration
*/
private boolean isDefaultClasspath(IRuntimeClasspathEntry[] classpath, ILaunchConfiguration configuration) {
try {
ILaunchConfigurationWorkingCopy wc = configuration.getWorkingCopy();
wc.setAttribute(IJavaLaunchConfigurationConstants.ATTR_DEFAULT_CLASSPATH, true);
IRuntimeClasspathEntry[] entries= JavaRuntime.computeUnresolvedRuntimeClasspath(wc);
if (classpath.length == entries.length) {
for (int i = 0; i < entries.length; i++) {
IRuntimeClasspathEntry entry = entries[i];
if (!entry.equals(classpath[i])) {
return false;
}
}
return true;
}
return false;
} catch (CoreException e) {
return false;
}
}
/* (non-Javadoc)
* @see org.eclipse.debug.ui.ILaunchConfigurationTab#getName()
*/
public String getName() {
return LauncherMessages.JavaClasspathTab_Cla_ss_path_3; //$NON-NLS-1$
}
/**
* @see org.eclipse.debug.ui.ILaunchConfigurationTab#getImage()
*/
public static Image getClasspathImage() {
return JavaDebugImages.get(JavaDebugImages.IMG_OBJS_CLASSPATH);
}
/**
* Sets the launch configuration for this classpath tab
*/
private void setLaunchConfiguration(ILaunchConfiguration config) {
fLaunchConfiguration = config;
}
/**
* Returns the current launch configuration
*/
public ILaunchConfiguration getLaunchConfiguration() {
return fLaunchConfiguration;
}
/* (non-Javadoc)
* @see org.eclipse.debug.ui.ILaunchConfigurationTab#dispose()
*/
public void dispose() {
if (fClasspathViewer != null) {
fClasspathViewer.removeEntriesChangedListener(this);
}
super.dispose();
}
/* (non-Javadoc)
* @see org.eclipse.debug.ui.ILaunchConfigurationTab#getImage()
*/
public Image getImage() {
return getClasspathImage();
}
/* (non-Javadoc)
* @see org.eclipse.debug.ui.ILaunchConfigurationTab#isValid(org.eclipse.debug.core.ILaunchConfiguration)
*/
public boolean isValid(ILaunchConfiguration launchConfig) {
setErrorMessage(null);
setMessage(null);
String projectName= null;
try {
projectName= launchConfig.getAttribute(IJavaLaunchConfigurationConstants.ATTR_PROJECT_NAME, ""); //$NON-NLS-1$
} catch (CoreException e) {
return false;
}
if (projectName.length() > 0) {
IWorkspace workspace = ResourcesPlugin.getWorkspace();
IStatus status = workspace.validateName(projectName, IResource.PROJECT);
if (status.isOK()) {
IProject project= ResourcesPlugin.getWorkspace().getRoot().getProject(projectName);
if (!project.exists()) {
setErrorMessage(MessageFormat.format(LauncherMessages.JavaMainTab_20, new String[] {projectName})); //$NON-NLS-1$
return false;
}
if (!project.isOpen()) {
setErrorMessage(MessageFormat.format(LauncherMessages.JavaMainTab_21, new String[] {projectName})); //$NON-NLS-1$
return false;
}
} else {
setErrorMessage(MessageFormat.format(LauncherMessages.JavaMainTab_19, new String[]{status.getMessage()})); //$NON-NLS-1$
return false;
}
}
IRuntimeClasspathEntry [] entries = fModel.getAllEntries();
for (int i=0; i<entries.length; i++)
{
if (entries[i].getType() == IRuntimeClasspathEntry.ARCHIVE
&& (!entries[i].getPath().isAbsolute()))
{
- setErrorMessage(LauncherMessages.JavaClasspathTab_Invalid_runtime_classpath_1);
+ setErrorMessage(MessageFormat.format(LauncherMessages.JavaClasspathTab_Invalid_runtime_classpath_1, new String[]{entries[i].getPath().toString()}));
return false;
}
}
return true;
}
/**
* Returns whether the bootpath should be displayed.
*
* @return whether the bootpath should be displayed
* @since 3.0
*/
public boolean isShowBootpath() {
return true;
}
/**
* @return Returns the classpath model.
*/
protected ClasspathModel getModel() {
return fModel;
}
}
| true | true | public boolean isValid(ILaunchConfiguration launchConfig) {
setErrorMessage(null);
setMessage(null);
String projectName= null;
try {
projectName= launchConfig.getAttribute(IJavaLaunchConfigurationConstants.ATTR_PROJECT_NAME, ""); //$NON-NLS-1$
} catch (CoreException e) {
return false;
}
if (projectName.length() > 0) {
IWorkspace workspace = ResourcesPlugin.getWorkspace();
IStatus status = workspace.validateName(projectName, IResource.PROJECT);
if (status.isOK()) {
IProject project= ResourcesPlugin.getWorkspace().getRoot().getProject(projectName);
if (!project.exists()) {
setErrorMessage(MessageFormat.format(LauncherMessages.JavaMainTab_20, new String[] {projectName})); //$NON-NLS-1$
return false;
}
if (!project.isOpen()) {
setErrorMessage(MessageFormat.format(LauncherMessages.JavaMainTab_21, new String[] {projectName})); //$NON-NLS-1$
return false;
}
} else {
setErrorMessage(MessageFormat.format(LauncherMessages.JavaMainTab_19, new String[]{status.getMessage()})); //$NON-NLS-1$
return false;
}
}
IRuntimeClasspathEntry [] entries = fModel.getAllEntries();
for (int i=0; i<entries.length; i++)
{
if (entries[i].getType() == IRuntimeClasspathEntry.ARCHIVE
&& (!entries[i].getPath().isAbsolute()))
{
setErrorMessage(LauncherMessages.JavaClasspathTab_Invalid_runtime_classpath_1);
return false;
}
}
return true;
}
| public boolean isValid(ILaunchConfiguration launchConfig) {
setErrorMessage(null);
setMessage(null);
String projectName= null;
try {
projectName= launchConfig.getAttribute(IJavaLaunchConfigurationConstants.ATTR_PROJECT_NAME, ""); //$NON-NLS-1$
} catch (CoreException e) {
return false;
}
if (projectName.length() > 0) {
IWorkspace workspace = ResourcesPlugin.getWorkspace();
IStatus status = workspace.validateName(projectName, IResource.PROJECT);
if (status.isOK()) {
IProject project= ResourcesPlugin.getWorkspace().getRoot().getProject(projectName);
if (!project.exists()) {
setErrorMessage(MessageFormat.format(LauncherMessages.JavaMainTab_20, new String[] {projectName})); //$NON-NLS-1$
return false;
}
if (!project.isOpen()) {
setErrorMessage(MessageFormat.format(LauncherMessages.JavaMainTab_21, new String[] {projectName})); //$NON-NLS-1$
return false;
}
} else {
setErrorMessage(MessageFormat.format(LauncherMessages.JavaMainTab_19, new String[]{status.getMessage()})); //$NON-NLS-1$
return false;
}
}
IRuntimeClasspathEntry [] entries = fModel.getAllEntries();
for (int i=0; i<entries.length; i++)
{
if (entries[i].getType() == IRuntimeClasspathEntry.ARCHIVE
&& (!entries[i].getPath().isAbsolute()))
{
setErrorMessage(MessageFormat.format(LauncherMessages.JavaClasspathTab_Invalid_runtime_classpath_1, new String[]{entries[i].getPath().toString()}));
return false;
}
}
return true;
}
|
diff --git a/src/com/bingo/eatime/admin/EventAdminServlet.java b/src/com/bingo/eatime/admin/EventAdminServlet.java
index 9304c24..4e24e7f 100644
--- a/src/com/bingo/eatime/admin/EventAdminServlet.java
+++ b/src/com/bingo/eatime/admin/EventAdminServlet.java
@@ -1,209 +1,211 @@
package com.bingo.eatime.admin;
import java.io.IOException;
import java.io.PrintWriter;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.logging.Level;
import java.util.logging.Logger;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import com.bingo.eatime.core.Event;
import com.bingo.eatime.core.EventManager;
import com.bingo.eatime.core.Person;
import com.bingo.eatime.core.PersonManager;
import com.bingo.eatime.core.Restaurant;
import com.bingo.eatime.core.RestaurantManager;
import com.google.appengine.api.datastore.Key;
import com.google.appengine.api.datastore.KeyFactory;
public class EventAdminServlet extends HttpServlet {
private static final long serialVersionUID = 8208066016833205326L;
private static final Logger log = Logger.getLogger(EventAdminServlet.class.getName());
// Status succeed
private static final int STATUS_SUCCEED = 0;
// Level 100 error - low priority error
private static final int ERROR_UNKNOWN = 100;
// Level 200 error - argument error
private static final int ERROR_MISSING_ARGUMENT = 200;
private static final int ERROR_MALFORMED_ARGUMENT = 201;
private static final int ERROR_USERNAME_NOT_FOUND = 202;
private static final int ERROR_RESTAURANT_NOT_FOUND = 203;
private static final int ERROR_CURRENT_USER_NOT_FOUND = 204;
private static final int ERROR_UNKNOWN_ACTION = 299;
// Level 300 error - database error
private static final int ERROR_DATABASE_FAILED = 300;
public void doGet(HttpServletRequest req, HttpServletResponse resp) {
int status = STATUS_SUCCEED;
List<String> usernameNotFoundList = new ArrayList<String>();
// Get arguments strings from parameters
String action = req.getParameter("action");
String eventName = req.getParameter("name");
String restaurantKeyName = req.getParameter("restaurant");
String creatorUsername = req.getParameter("username");
String dateString = req.getParameter("date");
String[] invitesUsername = req.getParameterValues("invite");
String eventIdString = req.getParameter("id");
// Parsing argument strings
Date time = null;
if (dateString != null) {
try {
long timeMillisecond = Long.valueOf(dateString);
time = new Date(timeMillisecond);
} catch (IllegalArgumentException e) {
if (ERROR_MALFORMED_ARGUMENT > status) {
status = ERROR_MALFORMED_ARGUMENT;
}
log.warning("Cannot parse date to long.");
}
}
Person creator = null;
if (creatorUsername != null) {
creator = PersonManager.getPersonByUsername(creatorUsername);
if (creator == null && ERROR_CURRENT_USER_NOT_FOUND > status) {
status = ERROR_CURRENT_USER_NOT_FOUND;
}
}
List<Person> invites = null;
if (invitesUsername != null) {
invites = new ArrayList<Person>();
for (String inviteUsername : invitesUsername) {
Person invite = PersonManager.getPersonByUsername(inviteUsername);
if (invite == null && ERROR_USERNAME_NOT_FOUND > status) {
status = ERROR_USERNAME_NOT_FOUND;
usernameNotFoundList.add(inviteUsername);
} else {
invites.add(invite);
}
}
}
Restaurant restaurant = null;
if (restaurantKeyName != null) {
restaurant = RestaurantManager.getRestaurant(restaurantKeyName);
if (restaurant == null && ERROR_RESTAURANT_NOT_FOUND > status) {
status = ERROR_RESTAURANT_NOT_FOUND;
}
}
Long eventId = null;
if (eventIdString != null) {
try {
eventId = Long.valueOf(eventIdString);
} catch (IllegalArgumentException e) {
if (ERROR_MALFORMED_ARGUMENT > status) {
status = ERROR_MALFORMED_ARGUMENT;
}
log.warning("Cannot parse event id to interger.");
}
}
// Conduct action
Key eventKey = null;
if (action != null) {
if (action.equals("add")) {
// Create new event
- if (eventName != null && restaurant != null && creator != null && time != null) {
+ if (status == STATUS_SUCCEED && eventName != null &&
+ restaurant != null && creator != null && time != null) {
Event event = Event.createEvent(eventName, restaurant, creator, time, invites);
eventKey = EventManager.addEvent(event);
if (eventKey == null && ERROR_DATABASE_FAILED > status) {
status = ERROR_DATABASE_FAILED;
}
} else if (ERROR_MISSING_ARGUMENT > status) {
status = ERROR_MISSING_ARGUMENT;
}
} else if (action.equals("invite")) {
// Add new invites
- if (eventId != null && invites != null && restaurantKeyName != null) {
+ if (status == STATUS_SUCCEED && eventId != null && invites != null
+ && restaurantKeyName != null) {
boolean result = EventManager.addInvites(invites, eventId, restaurantKeyName);
if (result) {
eventKey = KeyFactory.createKey(Event.KIND_EVENT, eventId);
} else if (ERROR_DATABASE_FAILED > status) {
status = ERROR_DATABASE_FAILED;
}
} else if (ERROR_MISSING_ARGUMENT > status) {
status = ERROR_MISSING_ARGUMENT;
}
} else if (ERROR_UNKNOWN_ACTION > status) {
// Unknown action
status = ERROR_UNKNOWN_ACTION;
}
}
// Write to client
try {
resp.setContentType("application/json");
PrintWriter writer = resp.getWriter();
if (status == STATUS_SUCCEED && eventKey != null) {
writer.print(generateResponseJson(status, null));
} else {
writer.print(generateResponseJson(status, usernameNotFoundList));
}
} catch (IOException e) {
log.log(Level.SEVERE, "Cannot get print writer.", e);
}
}
private String generateResponseJson(int status, List<String> bundle) {
if (status == STATUS_SUCCEED) {
return "{ \"status\": " + status + " }";
} else {
String reason = "";
switch(status) {
case ERROR_UNKNOWN:
reason = "Unknown error.";
break;
case ERROR_MISSING_ARGUMENT:
reason = "Missing argument.";
break;
case ERROR_UNKNOWN_ACTION:
reason = "Unknown action.";
break;
case ERROR_DATABASE_FAILED:
reason = "Update database failed.";
break;
case ERROR_USERNAME_NOT_FOUND:
StringBuilder sb = new StringBuilder();
sb.append("Following username not found: ");
for (String username : bundle) {
sb.append(username);
sb.append(", ");
}
sb.delete(sb.length() - 2, sb.length());
sb.append(".");
reason = sb.toString();
break;
case ERROR_CURRENT_USER_NOT_FOUND:
reason = "Current user not found.";
break;
case ERROR_MALFORMED_ARGUMENT:
reason = "Malformed argument.";
break;
case ERROR_RESTAURANT_NOT_FOUND:
reason = "Restaurant not found.";
break;
default:
break;
}
return "{ \"status\": " + status + ", \"reason\": \"" + reason + "\" }";
}
}
}
| false | true | public void doGet(HttpServletRequest req, HttpServletResponse resp) {
int status = STATUS_SUCCEED;
List<String> usernameNotFoundList = new ArrayList<String>();
// Get arguments strings from parameters
String action = req.getParameter("action");
String eventName = req.getParameter("name");
String restaurantKeyName = req.getParameter("restaurant");
String creatorUsername = req.getParameter("username");
String dateString = req.getParameter("date");
String[] invitesUsername = req.getParameterValues("invite");
String eventIdString = req.getParameter("id");
// Parsing argument strings
Date time = null;
if (dateString != null) {
try {
long timeMillisecond = Long.valueOf(dateString);
time = new Date(timeMillisecond);
} catch (IllegalArgumentException e) {
if (ERROR_MALFORMED_ARGUMENT > status) {
status = ERROR_MALFORMED_ARGUMENT;
}
log.warning("Cannot parse date to long.");
}
}
Person creator = null;
if (creatorUsername != null) {
creator = PersonManager.getPersonByUsername(creatorUsername);
if (creator == null && ERROR_CURRENT_USER_NOT_FOUND > status) {
status = ERROR_CURRENT_USER_NOT_FOUND;
}
}
List<Person> invites = null;
if (invitesUsername != null) {
invites = new ArrayList<Person>();
for (String inviteUsername : invitesUsername) {
Person invite = PersonManager.getPersonByUsername(inviteUsername);
if (invite == null && ERROR_USERNAME_NOT_FOUND > status) {
status = ERROR_USERNAME_NOT_FOUND;
usernameNotFoundList.add(inviteUsername);
} else {
invites.add(invite);
}
}
}
Restaurant restaurant = null;
if (restaurantKeyName != null) {
restaurant = RestaurantManager.getRestaurant(restaurantKeyName);
if (restaurant == null && ERROR_RESTAURANT_NOT_FOUND > status) {
status = ERROR_RESTAURANT_NOT_FOUND;
}
}
Long eventId = null;
if (eventIdString != null) {
try {
eventId = Long.valueOf(eventIdString);
} catch (IllegalArgumentException e) {
if (ERROR_MALFORMED_ARGUMENT > status) {
status = ERROR_MALFORMED_ARGUMENT;
}
log.warning("Cannot parse event id to interger.");
}
}
// Conduct action
Key eventKey = null;
if (action != null) {
if (action.equals("add")) {
// Create new event
if (eventName != null && restaurant != null && creator != null && time != null) {
Event event = Event.createEvent(eventName, restaurant, creator, time, invites);
eventKey = EventManager.addEvent(event);
if (eventKey == null && ERROR_DATABASE_FAILED > status) {
status = ERROR_DATABASE_FAILED;
}
} else if (ERROR_MISSING_ARGUMENT > status) {
status = ERROR_MISSING_ARGUMENT;
}
} else if (action.equals("invite")) {
// Add new invites
if (eventId != null && invites != null && restaurantKeyName != null) {
boolean result = EventManager.addInvites(invites, eventId, restaurantKeyName);
if (result) {
eventKey = KeyFactory.createKey(Event.KIND_EVENT, eventId);
} else if (ERROR_DATABASE_FAILED > status) {
status = ERROR_DATABASE_FAILED;
}
} else if (ERROR_MISSING_ARGUMENT > status) {
status = ERROR_MISSING_ARGUMENT;
}
} else if (ERROR_UNKNOWN_ACTION > status) {
// Unknown action
status = ERROR_UNKNOWN_ACTION;
}
}
// Write to client
try {
resp.setContentType("application/json");
PrintWriter writer = resp.getWriter();
if (status == STATUS_SUCCEED && eventKey != null) {
writer.print(generateResponseJson(status, null));
} else {
writer.print(generateResponseJson(status, usernameNotFoundList));
}
} catch (IOException e) {
log.log(Level.SEVERE, "Cannot get print writer.", e);
}
}
| public void doGet(HttpServletRequest req, HttpServletResponse resp) {
int status = STATUS_SUCCEED;
List<String> usernameNotFoundList = new ArrayList<String>();
// Get arguments strings from parameters
String action = req.getParameter("action");
String eventName = req.getParameter("name");
String restaurantKeyName = req.getParameter("restaurant");
String creatorUsername = req.getParameter("username");
String dateString = req.getParameter("date");
String[] invitesUsername = req.getParameterValues("invite");
String eventIdString = req.getParameter("id");
// Parsing argument strings
Date time = null;
if (dateString != null) {
try {
long timeMillisecond = Long.valueOf(dateString);
time = new Date(timeMillisecond);
} catch (IllegalArgumentException e) {
if (ERROR_MALFORMED_ARGUMENT > status) {
status = ERROR_MALFORMED_ARGUMENT;
}
log.warning("Cannot parse date to long.");
}
}
Person creator = null;
if (creatorUsername != null) {
creator = PersonManager.getPersonByUsername(creatorUsername);
if (creator == null && ERROR_CURRENT_USER_NOT_FOUND > status) {
status = ERROR_CURRENT_USER_NOT_FOUND;
}
}
List<Person> invites = null;
if (invitesUsername != null) {
invites = new ArrayList<Person>();
for (String inviteUsername : invitesUsername) {
Person invite = PersonManager.getPersonByUsername(inviteUsername);
if (invite == null && ERROR_USERNAME_NOT_FOUND > status) {
status = ERROR_USERNAME_NOT_FOUND;
usernameNotFoundList.add(inviteUsername);
} else {
invites.add(invite);
}
}
}
Restaurant restaurant = null;
if (restaurantKeyName != null) {
restaurant = RestaurantManager.getRestaurant(restaurantKeyName);
if (restaurant == null && ERROR_RESTAURANT_NOT_FOUND > status) {
status = ERROR_RESTAURANT_NOT_FOUND;
}
}
Long eventId = null;
if (eventIdString != null) {
try {
eventId = Long.valueOf(eventIdString);
} catch (IllegalArgumentException e) {
if (ERROR_MALFORMED_ARGUMENT > status) {
status = ERROR_MALFORMED_ARGUMENT;
}
log.warning("Cannot parse event id to interger.");
}
}
// Conduct action
Key eventKey = null;
if (action != null) {
if (action.equals("add")) {
// Create new event
if (status == STATUS_SUCCEED && eventName != null &&
restaurant != null && creator != null && time != null) {
Event event = Event.createEvent(eventName, restaurant, creator, time, invites);
eventKey = EventManager.addEvent(event);
if (eventKey == null && ERROR_DATABASE_FAILED > status) {
status = ERROR_DATABASE_FAILED;
}
} else if (ERROR_MISSING_ARGUMENT > status) {
status = ERROR_MISSING_ARGUMENT;
}
} else if (action.equals("invite")) {
// Add new invites
if (status == STATUS_SUCCEED && eventId != null && invites != null
&& restaurantKeyName != null) {
boolean result = EventManager.addInvites(invites, eventId, restaurantKeyName);
if (result) {
eventKey = KeyFactory.createKey(Event.KIND_EVENT, eventId);
} else if (ERROR_DATABASE_FAILED > status) {
status = ERROR_DATABASE_FAILED;
}
} else if (ERROR_MISSING_ARGUMENT > status) {
status = ERROR_MISSING_ARGUMENT;
}
} else if (ERROR_UNKNOWN_ACTION > status) {
// Unknown action
status = ERROR_UNKNOWN_ACTION;
}
}
// Write to client
try {
resp.setContentType("application/json");
PrintWriter writer = resp.getWriter();
if (status == STATUS_SUCCEED && eventKey != null) {
writer.print(generateResponseJson(status, null));
} else {
writer.print(generateResponseJson(status, usernameNotFoundList));
}
} catch (IOException e) {
log.log(Level.SEVERE, "Cannot get print writer.", e);
}
}
|
diff --git a/org.eclipse.b3.aggregator.engine/src/org/eclipse/b3/aggregator/engine/internal/RequirementUtils.java b/org.eclipse.b3.aggregator.engine/src/org/eclipse/b3/aggregator/engine/internal/RequirementUtils.java
index 322b3526..0b0d4ac0 100644
--- a/org.eclipse.b3.aggregator.engine/src/org/eclipse/b3/aggregator/engine/internal/RequirementUtils.java
+++ b/org.eclipse.b3.aggregator.engine/src/org/eclipse/b3/aggregator/engine/internal/RequirementUtils.java
@@ -1,162 +1,164 @@
/**
* Copyright (c) 2006-2009, Cloudsmith Inc.
* The code, documentation and other materials contained herein have been
* licensed under the Eclipse Public License - v 1.0 by the copyright holder
* listed above, as the Initial Contributor under such license. The text of
* such license is available at www.eclipse.org.
*/
package org.eclipse.b3.aggregator.engine.internal;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.eclipse.b3.p2.MetadataRepository;
import org.eclipse.b3.util.LogUtils;
import org.eclipse.equinox.internal.p2.metadata.IRequiredCapability;
import org.eclipse.equinox.internal.p2.metadata.expression.ExpressionFactory;
import org.eclipse.equinox.p2.metadata.IInstallableUnit;
import org.eclipse.equinox.p2.metadata.IRequirement;
import org.eclipse.equinox.p2.metadata.MetadataFactory;
import org.eclipse.equinox.p2.metadata.Version;
import org.eclipse.equinox.p2.metadata.VersionRange;
import org.eclipse.equinox.p2.metadata.expression.IMatchExpression;
/**
* @author [email protected]
*
*/
@SuppressWarnings("deprecation")
public class RequirementUtils {
private static final VersionRange ANY_VERSION = VersionRange.emptyRange;
public static IRequirement[] createAllAvailableVersionsRequirements(List<IInstallableUnit> ius,
- IMatchExpression<IInstallableUnit> filter) {
+ final IMatchExpression<IInstallableUnit> filter) {
Map<String, Set<Version>> versionMap = new HashMap<String, Set<Version>>();
Map<String, Set<IMatchExpression<IInstallableUnit>>> filterMap = new HashMap<String, Set<IMatchExpression<IInstallableUnit>>>();
for(IInstallableUnit iu : ius) {
Set<Version> versionSet = versionMap.get(iu.getId());
if(versionSet == null) {
versionMap.put(iu.getId(), versionSet = new HashSet<Version>());
}
Set<IMatchExpression<IInstallableUnit>> filterSet = filterMap.get(iu.getId());
if(filterSet == null) {
filterMap.put(iu.getId(), filterSet = new HashSet<IMatchExpression<IInstallableUnit>>());
}
versionSet.add(iu.getVersion());
filterSet.add(iu.getFilter());
}
IRequirement[] requirements = new IRequirement[versionMap.size()];
int i = 0;
for(Map.Entry<String, Set<Version>> iuEntry : versionMap.entrySet()) {
String name = iuEntry.getKey();
String namespace = IInstallableUnit.NAMESPACE_IU_ID;
IMatchExpression<IInstallableUnit> inheritedFilter = null;
for(IMatchExpression<IInstallableUnit> iuFilter : filterMap.get(name)) {
if(inheritedFilter == null)
inheritedFilter = iuFilter;
else if(!inheritedFilter.equals(iuFilter)) {
LogUtils.info("More than one filter definition found on %s; using an empty filter", name);
inheritedFilter = null;
break;
}
}
+ IMatchExpression<IInstallableUnit> requirementFilter = filter;
if(inheritedFilter != null) {
- if(filter == null)
- filter = inheritedFilter;
+ if(requirementFilter == null)
+ requirementFilter = inheritedFilter;
else {
Object[] inheritedFilterParams = inheritedFilter.getParameters();
Object[] filterParams = filter.getParameters();
Object[] compoundParams = new Object[inheritedFilterParams.length + filterParams.length];
System.arraycopy(inheritedFilterParams, 0, compoundParams, 0, inheritedFilterParams.length);
System.arraycopy(filterParams, 0, compoundParams, inheritedFilterParams.length, filterParams.length);
- filter = ExpressionFactory.INSTANCE.matchExpression(ExpressionFactory.INSTANCE.and(
- inheritedFilter, filter), compoundParams);
+ requirementFilter = ExpressionFactory.INSTANCE.matchExpression(
+ ExpressionFactory.INSTANCE.and(inheritedFilter, filter), compoundParams);
}
}
// TODO Use this to activate the "version enumeration" policy workaround
// requirements[i++] = new MultiRangeRequirement(name, namespace, iuEntry.getValue(), null, filter);
- requirements[i++] = MetadataFactory.createRequirement(namespace, name, ANY_VERSION, filter, false, false);
+ requirements[i++] = MetadataFactory.createRequirement(
+ namespace, name, ANY_VERSION, requirementFilter, false, false);
}
return requirements;
}
/**
 * Builds a multi-range requirement that enumerates every version in the given
 * metadata repository matched by the supplied requirement. All matching IUs
 * must share a single id; otherwise the requirement is considered ambiguous.
 *
 * @param mdr the metadata repository whose installable units are scanned
 * @param req the requirement used to select matching installable units
 * @return a MultiRangeRequirement covering all matched versions, carrying the
 *         filter of the original requirement
 * @throws RuntimeException if the matched IUs do not all share the same id
 */
public static IRequirement createMultiRangeRequirement(MetadataRepository mdr, IRequirement req) {
	Set<Version> versions = new HashSet<Version>();
	String matchedId = null;
	for(IInstallableUnit candidate : mdr.getInstallableUnits()) {
		if(!req.isMatch(candidate))
			continue;
		versions.add(candidate.getVersion());
		if(matchedId == null) {
			matchedId = candidate.getId();
		} else if(!matchedId.equals(candidate.getId())) {
			throw new RuntimeException("Requirement must contain strict name filter");
		}
	}
	// NOTE(review): when nothing matches, matchedId stays null and a requirement
	// with a null name is produced — same as the original behavior.
	return new MultiRangeRequirement(matchedId, IInstallableUnit.NAMESPACE_IU_ID, versions, null, req.getFilter());
}
/**
 * Retrieves the IU name from a requirement.
 *
 * @param req the requirement to extract the name from; must be either an
 *            {@link IRequiredCapability} or a {@link MultiRangeRequirement}
 * @return the IU name carried by the requirement
 * @throws RuntimeException if the requirement is of an unsupported type
 *         (the original javadoc claimed a null return, but the code throws)
 */
public static String getName(IRequirement req) {
	if(req instanceof IRequiredCapability)
		return ((IRequiredCapability) req).getName();
	else if(req instanceof MultiRangeRequirement)
		return ((MultiRangeRequirement) req).getName();
	// Fixed typo in the error message: "extrace" -> "extract".
	throw new RuntimeException("Unable to extract IU name from requirement of class " + req.getClass().getName());
}
/**
 * Creates the union of two multi-range requirements: a single requirement on
 * the same name and namespace whose version set is the union of both inputs'
 * version sets.
 *
 * @param req1 the first requirement; must be a {@link MultiRangeRequirement}
 * @param req2 the second requirement; must be a {@link MultiRangeRequirement}
 *             with the same name, namespace, and filter as {@code req1}
 * @return a MultiRangeRequirement covering all versions of both inputs
 * @throws RuntimeException if either requirement is not a
 *         MultiRangeRequirement, or if the two differ in name, namespace, or
 *         filter
 */
public static IRequirement versionUnion(IRequirement req1, IRequirement req2) {
	if(req1 instanceof MultiRangeRequirement && req2 instanceof MultiRangeRequirement) {
		MultiRangeRequirement vreq1 = (MultiRangeRequirement) req1;
		MultiRangeRequirement vreq2 = (MultiRangeRequirement) req2;
		if(!vreq1.getName().equals(vreq2.getName()))
			throw new RuntimeException(
				"Unable to create a version union of expressions with different name requests");
		if(!vreq1.getNamespace().equals(vreq2.getNamespace()))
			throw new RuntimeException(
				"Unable to create a version union of expressions with different namespace requests");
		IMatchExpression<IInstallableUnit> f1 = vreq1.getFilter();
		IMatchExpression<IInstallableUnit> f2 = vreq2.getFilter();
		// Null-safe inequality check: the filters must be equal (or both null).
		if(f1 != null && !f1.equals(f2) || f1 == null && f2 != null)
			throw new RuntimeException("Unable to create a version union of expressions with different filters");
		Set<Version> allVersions = new HashSet<Version>();
		allVersions.addAll(vreq1.getVersions());
		allVersions.addAll(vreq2.getVersions());
		return new MultiRangeRequirement(vreq1.getName(), vreq1.getNamespace(), allVersions, null, f1);
	}
	throw new RuntimeException("Unable to create a version union of expressions: " + req1 + ", " + req2);
}
}
| false | true | public static IRequirement[] createAllAvailableVersionsRequirements(List<IInstallableUnit> ius,
IMatchExpression<IInstallableUnit> filter) {
Map<String, Set<Version>> versionMap = new HashMap<String, Set<Version>>();
Map<String, Set<IMatchExpression<IInstallableUnit>>> filterMap = new HashMap<String, Set<IMatchExpression<IInstallableUnit>>>();
for(IInstallableUnit iu : ius) {
Set<Version> versionSet = versionMap.get(iu.getId());
if(versionSet == null) {
versionMap.put(iu.getId(), versionSet = new HashSet<Version>());
}
Set<IMatchExpression<IInstallableUnit>> filterSet = filterMap.get(iu.getId());
if(filterSet == null) {
filterMap.put(iu.getId(), filterSet = new HashSet<IMatchExpression<IInstallableUnit>>());
}
versionSet.add(iu.getVersion());
filterSet.add(iu.getFilter());
}
IRequirement[] requirements = new IRequirement[versionMap.size()];
int i = 0;
for(Map.Entry<String, Set<Version>> iuEntry : versionMap.entrySet()) {
String name = iuEntry.getKey();
String namespace = IInstallableUnit.NAMESPACE_IU_ID;
IMatchExpression<IInstallableUnit> inheritedFilter = null;
for(IMatchExpression<IInstallableUnit> iuFilter : filterMap.get(name)) {
if(inheritedFilter == null)
inheritedFilter = iuFilter;
else if(!inheritedFilter.equals(iuFilter)) {
LogUtils.info("More than one filter definition found on %s; using an empty filter", name);
inheritedFilter = null;
break;
}
}
if(inheritedFilter != null) {
if(filter == null)
filter = inheritedFilter;
else {
Object[] inheritedFilterParams = inheritedFilter.getParameters();
Object[] filterParams = filter.getParameters();
Object[] compoundParams = new Object[inheritedFilterParams.length + filterParams.length];
System.arraycopy(inheritedFilterParams, 0, compoundParams, 0, inheritedFilterParams.length);
System.arraycopy(filterParams, 0, compoundParams, inheritedFilterParams.length, filterParams.length);
filter = ExpressionFactory.INSTANCE.matchExpression(ExpressionFactory.INSTANCE.and(
inheritedFilter, filter), compoundParams);
}
}
// TODO Use this to activate the "version enumeration" policy workaround
// requirements[i++] = new MultiRangeRequirement(name, namespace, iuEntry.getValue(), null, filter);
requirements[i++] = MetadataFactory.createRequirement(namespace, name, ANY_VERSION, filter, false, false);
}
return requirements;
}
| public static IRequirement[] createAllAvailableVersionsRequirements(List<IInstallableUnit> ius,
final IMatchExpression<IInstallableUnit> filter) {
Map<String, Set<Version>> versionMap = new HashMap<String, Set<Version>>();
Map<String, Set<IMatchExpression<IInstallableUnit>>> filterMap = new HashMap<String, Set<IMatchExpression<IInstallableUnit>>>();
for(IInstallableUnit iu : ius) {
Set<Version> versionSet = versionMap.get(iu.getId());
if(versionSet == null) {
versionMap.put(iu.getId(), versionSet = new HashSet<Version>());
}
Set<IMatchExpression<IInstallableUnit>> filterSet = filterMap.get(iu.getId());
if(filterSet == null) {
filterMap.put(iu.getId(), filterSet = new HashSet<IMatchExpression<IInstallableUnit>>());
}
versionSet.add(iu.getVersion());
filterSet.add(iu.getFilter());
}
IRequirement[] requirements = new IRequirement[versionMap.size()];
int i = 0;
for(Map.Entry<String, Set<Version>> iuEntry : versionMap.entrySet()) {
String name = iuEntry.getKey();
String namespace = IInstallableUnit.NAMESPACE_IU_ID;
IMatchExpression<IInstallableUnit> inheritedFilter = null;
for(IMatchExpression<IInstallableUnit> iuFilter : filterMap.get(name)) {
if(inheritedFilter == null)
inheritedFilter = iuFilter;
else if(!inheritedFilter.equals(iuFilter)) {
LogUtils.info("More than one filter definition found on %s; using an empty filter", name);
inheritedFilter = null;
break;
}
}
IMatchExpression<IInstallableUnit> requirementFilter = filter;
if(inheritedFilter != null) {
if(requirementFilter == null)
requirementFilter = inheritedFilter;
else {
Object[] inheritedFilterParams = inheritedFilter.getParameters();
Object[] filterParams = filter.getParameters();
Object[] compoundParams = new Object[inheritedFilterParams.length + filterParams.length];
System.arraycopy(inheritedFilterParams, 0, compoundParams, 0, inheritedFilterParams.length);
System.arraycopy(filterParams, 0, compoundParams, inheritedFilterParams.length, filterParams.length);
requirementFilter = ExpressionFactory.INSTANCE.matchExpression(
ExpressionFactory.INSTANCE.and(inheritedFilter, filter), compoundParams);
}
}
// TODO Use this to activate the "version enumeration" policy workaround
// requirements[i++] = new MultiRangeRequirement(name, namespace, iuEntry.getValue(), null, filter);
requirements[i++] = MetadataFactory.createRequirement(
namespace, name, ANY_VERSION, requirementFilter, false, false);
}
return requirements;
}
|
diff --git a/src/nl/giantit/minecraft/GiantShop/core/Tools/dbInit/dbInit.java b/src/nl/giantit/minecraft/GiantShop/core/Tools/dbInit/dbInit.java
index 3b56a36..0bae739 100644
--- a/src/nl/giantit/minecraft/GiantShop/core/Tools/dbInit/dbInit.java
+++ b/src/nl/giantit/minecraft/GiantShop/core/Tools/dbInit/dbInit.java
@@ -1,330 +1,330 @@
package nl.giantit.minecraft.GiantShop.core.Tools.dbInit;
import nl.giantit.minecraft.GiantShop.GiantShop;
import nl.giantit.minecraft.GiantShop.core.Database.drivers.iDriver;
import nl.giantit.minecraft.GiantShop.core.Tools.dbInit.Updates.*;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.logging.Level;
public class dbInit {
private iDriver dbDriver;
private double curS = 1.0, curI = 1.1, curD = 1.2;
private void init() {
if(!this.dbDriver.tableExists("#__versions")) {
HashMap<String, HashMap<String, String>> fields = new HashMap<String, HashMap<String, String>>();
HashMap<String, String> data = new HashMap<String, String>();
data.put("TYPE", "VARCHAR");
data.put("LENGTH", "100");
data.put("NULL", "false");
fields.put("tableName", data);
data = new HashMap<String, String>();
- data.put("TYPE", "INT");
+ data.put("TYPE", "DOUBLE");
data.put("LENGTH", null);
data.put("NULL", "false");
data.put("DEFAULT", "1.0");
fields.put("version", data);
this.dbDriver.create("#__versions").fields(fields).Finalize();
this.dbDriver.updateQuery();
GiantShop.log.log(Level.INFO, "Revisions table successfully created!");
}
if(!this.dbDriver.tableExists("#__log")){
ArrayList<String> field = new ArrayList<String>();
field.add("tablename");
field.add("version");
HashMap<Integer, HashMap<String, String>> d = new HashMap<Integer, HashMap<String, String>>();
HashMap<String, String> data = new HashMap<String, String>();
data.put("data", "log");
d.put(0, data);
data = new HashMap<String, String>();
data.put("data", "1.0");
d.put(1, data);
this.dbDriver.insert("#__versions", field, d).Finalize();
this.dbDriver.updateQuery();
HashMap<String, HashMap<String, String>> fields = new HashMap<String, HashMap<String, String>>();
data = new HashMap<String, String>();
data.put("TYPE", "INT");
data.put("LENGTH", "3");
data.put("NULL", "false");
data.put("A_INCR", "true");
data.put("P_KEY", "true");
fields.put("id", data);
data = new HashMap<String, String>();
data.put("TYPE", "INT");
data.put("LENGTH", "3");
data.put("NULL", "true");
fields.put("type", data);
data = new HashMap<String, String>();
data.put("TYPE", "VARCHAR");
data.put("LENGTH", "100");
data.put("NULL", "true");
fields.put("user", data);
data = new HashMap<String, String>();
data.put("TYPE", "VARCHAR");
data.put("LENGTH", "100");
data.put("NULL", "true");
fields.put("data", data);
data = new HashMap<String, String>();
data.put("TYPE", "INT");
data.put("LENGTH", "50");
data.put("NULL", "false");
data.put("DEFAULT", "0");
fields.put("date", data);
this.dbDriver.create("#__log").fields(fields).Finalize();
this.dbDriver.updateQuery();
GiantShop.log.log(Level.INFO, "Logging table successfully created!");
}
if(!this.dbDriver.tableExists("#__shops")) {
ArrayList<String> field = new ArrayList<String>();
field.add("tablename");
field.add("version");
HashMap<Integer, HashMap<String, String>> d = new HashMap<Integer, HashMap<String, String>>();
HashMap<String, String> data = new HashMap<String, String>();
data.put("data", "shops");
d.put(0, data);
data = new HashMap<String, String>();
data.put("data", "1.0");
d.put(1, data);
this.dbDriver.insert("#__versions", field, d).Finalize();
this.dbDriver.updateQuery();
HashMap<String, HashMap<String, String>> fields = new HashMap<String, HashMap<String, String>>();
data = new HashMap<String, String>();
data.put("TYPE", "INT");
data.put("LENGTH", "3");
data.put("NULL", "false");
data.put("A_INCR", "true");
data.put("P_KEY", "true");
fields.put("id", data);
data = new HashMap<String, String>();
data.put("TYPE", "VARCHAR");
data.put("LENGTH", "100");
data.put("NULL", "false");
fields.put("name", data);
data = new HashMap<String, String>();
data.put("TYPE", "VARCHAR");
data.put("LENGTH", "100");
data.put("NULL", "true");
fields.put("world", data);
data = new HashMap<String, String>();
- data.put("TYPE", "INT");
+ data.put("TYPE", "DOUBLE");
data.put("LENGTH", null);
data.put("NULL", "false");
fields.put("locMinX", data);
data = new HashMap<String, String>();
- data.put("TYPE", "INT");
+ data.put("TYPE", "DOUBLE");
data.put("LENGTH", null);
data.put("NULL", "false");
fields.put("locMinY", data);
data = new HashMap<String, String>();
- data.put("TYPE", "INT");
+ data.put("TYPE", "DOUBLE");
data.put("LENGTH", null);
data.put("NULL", "false");
fields.put("locMinZ", data);
data = new HashMap<String, String>();
- data.put("TYPE", "INT");
+ data.put("TYPE", "DOUBLE");
data.put("LENGTH", null);
data.put("NULL", "false");
fields.put("locMaxX", data);
data = new HashMap<String, String>();
- data.put("TYPE", "INT");
+ data.put("TYPE", "DOUBLE");
data.put("LENGTH", null);
data.put("NULL", "false");
fields.put("locMaxY", data);
data = new HashMap<String, String>();
- data.put("TYPE", "INT");
+ data.put("TYPE", "DOUBLE");
data.put("LENGTH", null);
data.put("NULL", "false");
fields.put("locMaxZ", data);
this.dbDriver.create("#__shops").fields(fields).Finalize();
this.dbDriver.updateQuery();
GiantShop.log.log(Level.INFO, "Shops table successfully created!");
}
if(!this.dbDriver.tableExists("#__items")) {
ArrayList<String> field = new ArrayList<String>();
field.add("tablename");
field.add("version");
HashMap<Integer, HashMap<String, String>> d = new HashMap<Integer, HashMap<String, String>>();
HashMap<String, String> data = new HashMap<String, String>();
data.put("data", "items");
d.put(0, data);
data = new HashMap<String, String>();
data.put("data", "1.0");
d.put(1, data);
this.dbDriver.insert("#__versions", field, d).Finalize();
this.dbDriver.updateQuery();
HashMap<String, HashMap<String, String>> fields = new HashMap<String, HashMap<String, String>>();
data = new HashMap<String, String>();
data.put("TYPE", "INT");
data.put("LENGTH", "3");
data.put("NULL", "false");
data.put("A_INCR", "true");
data.put("P_KEY", "true");
fields.put("id", data);
data = new HashMap<String, String>();
data.put("TYPE", "INT");
data.put("LENGTH", "3");
data.put("NULL", "false");
fields.put("itemID", data);
data = new HashMap<String, String>();
data.put("TYPE", "INT");
data.put("LENGTH", "3");
data.put("DEFAULT", "-1");
fields.put("type", data);
data = new HashMap<String, String>();
- data.put("TYPE", "INT");
- data.put("LENGTH", "3");
+ data.put("TYPE", "DOUBLE");
+ data.put("LENGTH", null);
data.put("DEFAULT", "-1");
fields.put("sellFor", data);
data = new HashMap<String, String>();
- data.put("TYPE", "INT");
- data.put("LENGTH", "3");
+ data.put("TYPE", "DOUBLE");
+ data.put("LENGTH", null);
data.put("DEFAULT", "-1");
fields.put("buyFor", data);
data = new HashMap<String, String>();
- data.put("TYPE", "INT");
- data.put("LENGTH", "3");
+ data.put("TYPE", "DOUBLE");
+ data.put("LENGTH", null);
data.put("DEFAULT", "-1");
fields.put("stock", data);
data = new HashMap<String, String>();
data.put("TYPE", "INT");
data.put("LENGTH", "3");
data.put("DEFAULT", "1");
fields.put("perStack", data);
data = new HashMap<String, String>();
data.put("TYPE", "VARCHAR");
data.put("LENGTH", "100");
data.put("NULL", "true");
fields.put("shops", data);
this.dbDriver.create("#__items").fields(fields).Finalize();
this.dbDriver.updateQuery();
GiantShop.log.log(Level.INFO, "Items table successfully created!");
}
if(!this.dbDriver.tableExists("#__discounts")) {
ArrayList<String> field = new ArrayList<String>();
field.add("tablename");
field.add("version");
HashMap<Integer, HashMap<String, String>> d = new HashMap<Integer, HashMap<String, String>>();
HashMap<String, String> data = new HashMap<String, String>();
data.put("data", "discounts");
d.put(0, data);
data = new HashMap<String, String>();
data.put("data", "1.0");
d.put(1, data);
this.dbDriver.insert("#__versions", field, d).Finalize();
this.dbDriver.updateQuery();
HashMap<String, HashMap<String, String>> fields = new HashMap<String, HashMap<String, String>>();
data = new HashMap<String, String>();
data.put("TYPE", "INT");
data.put("LENGTH", "3");
data.put("NULL", "false");
data.put("A_INCR", "true");
data.put("P_KEY", "true");
fields.put("id", data);
data = new HashMap<String, String>();
data.put("TYPE", "INT");
data.put("LENGTH", "3");
data.put("NULL", "false");
fields.put("itemID", data);
data = new HashMap<String, String>();
data.put("TYPE", "INT");
data.put("LENGTH", "3");
data.put("DEFAULT", "10");
fields.put("discount", data);
data = new HashMap<String, String>();
data.put("TYPE", "VARCHAR");
data.put("LENGTH", "100");
data.put("NULL", "true");
fields.put("user", data);
data = new HashMap<String, String>();
data.put("TYPE", "VARCHAR");
data.put("LENGTH", "100");
data.put("NULL", "true");
fields.put("`group`", data);
this.dbDriver.create("#__discounts").fields(fields).Finalize();
this.dbDriver.updateQuery();
GiantShop.log.log(Level.INFO, "Discounts type table successfully created!");
}
}
/**
 * Reads the recorded schema version of each table from the versions table and
 * runs the matching upgrade routine for any table whose stored version is
 * behind the current one (curS/curI/curD).
 */
private void checkUpdate() {
	ArrayList<HashMap<String, String>> rows = this.dbDriver.select("tablename", "version").from("#__versions").execQuery();
	for(HashMap<String, String> row : rows) {
		// NOTE(review): the select uses key "tablename" but the lookup uses
		// "tableName" — case mismatch preserved from the original; confirm
		// which casing the driver actually returns.
		String tbl = row.get("tableName");
		Double ver = Double.parseDouble(row.get("version"));
		if(tbl.equalsIgnoreCase("shops") && ver < curS) {
			Shops.run(ver);
		} else if(tbl.equalsIgnoreCase("items") && ver < curI) {
			Items.run(ver);
		} else if(tbl.equalsIgnoreCase("discounts") && ver < curD) {
			Discounts.run(ver);
		}
	}
}
/**
 * Creates the database bootstrapper: grabs the plugin's database engine,
 * creates any missing tables via init(), then applies pending schema
 * upgrades via checkUpdate().
 *
 * @param plugin the owning GiantShop plugin instance providing the DB engine
 */
public dbInit(GiantShop plugin) {
	this.dbDriver = plugin.getDB().getEngine();
	this.init();
	this.checkUpdate();
}
}
| false | true | private void init() {
if(!this.dbDriver.tableExists("#__versions")) {
HashMap<String, HashMap<String, String>> fields = new HashMap<String, HashMap<String, String>>();
HashMap<String, String> data = new HashMap<String, String>();
data.put("TYPE", "VARCHAR");
data.put("LENGTH", "100");
data.put("NULL", "false");
fields.put("tableName", data);
data = new HashMap<String, String>();
data.put("TYPE", "INT");
data.put("LENGTH", null);
data.put("NULL", "false");
data.put("DEFAULT", "1.0");
fields.put("version", data);
this.dbDriver.create("#__versions").fields(fields).Finalize();
this.dbDriver.updateQuery();
GiantShop.log.log(Level.INFO, "Revisions table successfully created!");
}
if(!this.dbDriver.tableExists("#__log")){
ArrayList<String> field = new ArrayList<String>();
field.add("tablename");
field.add("version");
HashMap<Integer, HashMap<String, String>> d = new HashMap<Integer, HashMap<String, String>>();
HashMap<String, String> data = new HashMap<String, String>();
data.put("data", "log");
d.put(0, data);
data = new HashMap<String, String>();
data.put("data", "1.0");
d.put(1, data);
this.dbDriver.insert("#__versions", field, d).Finalize();
this.dbDriver.updateQuery();
HashMap<String, HashMap<String, String>> fields = new HashMap<String, HashMap<String, String>>();
data = new HashMap<String, String>();
data.put("TYPE", "INT");
data.put("LENGTH", "3");
data.put("NULL", "false");
data.put("A_INCR", "true");
data.put("P_KEY", "true");
fields.put("id", data);
data = new HashMap<String, String>();
data.put("TYPE", "INT");
data.put("LENGTH", "3");
data.put("NULL", "true");
fields.put("type", data);
data = new HashMap<String, String>();
data.put("TYPE", "VARCHAR");
data.put("LENGTH", "100");
data.put("NULL", "true");
fields.put("user", data);
data = new HashMap<String, String>();
data.put("TYPE", "VARCHAR");
data.put("LENGTH", "100");
data.put("NULL", "true");
fields.put("data", data);
data = new HashMap<String, String>();
data.put("TYPE", "INT");
data.put("LENGTH", "50");
data.put("NULL", "false");
data.put("DEFAULT", "0");
fields.put("date", data);
this.dbDriver.create("#__log").fields(fields).Finalize();
this.dbDriver.updateQuery();
GiantShop.log.log(Level.INFO, "Logging table successfully created!");
}
if(!this.dbDriver.tableExists("#__shops")) {
ArrayList<String> field = new ArrayList<String>();
field.add("tablename");
field.add("version");
HashMap<Integer, HashMap<String, String>> d = new HashMap<Integer, HashMap<String, String>>();
HashMap<String, String> data = new HashMap<String, String>();
data.put("data", "shops");
d.put(0, data);
data = new HashMap<String, String>();
data.put("data", "1.0");
d.put(1, data);
this.dbDriver.insert("#__versions", field, d).Finalize();
this.dbDriver.updateQuery();
HashMap<String, HashMap<String, String>> fields = new HashMap<String, HashMap<String, String>>();
data = new HashMap<String, String>();
data.put("TYPE", "INT");
data.put("LENGTH", "3");
data.put("NULL", "false");
data.put("A_INCR", "true");
data.put("P_KEY", "true");
fields.put("id", data);
data = new HashMap<String, String>();
data.put("TYPE", "VARCHAR");
data.put("LENGTH", "100");
data.put("NULL", "false");
fields.put("name", data);
data = new HashMap<String, String>();
data.put("TYPE", "VARCHAR");
data.put("LENGTH", "100");
data.put("NULL", "true");
fields.put("world", data);
data = new HashMap<String, String>();
data.put("TYPE", "INT");
data.put("LENGTH", null);
data.put("NULL", "false");
fields.put("locMinX", data);
data = new HashMap<String, String>();
data.put("TYPE", "INT");
data.put("LENGTH", null);
data.put("NULL", "false");
fields.put("locMinY", data);
data = new HashMap<String, String>();
data.put("TYPE", "INT");
data.put("LENGTH", null);
data.put("NULL", "false");
fields.put("locMinZ", data);
data = new HashMap<String, String>();
data.put("TYPE", "INT");
data.put("LENGTH", null);
data.put("NULL", "false");
fields.put("locMaxX", data);
data = new HashMap<String, String>();
data.put("TYPE", "INT");
data.put("LENGTH", null);
data.put("NULL", "false");
fields.put("locMaxY", data);
data = new HashMap<String, String>();
data.put("TYPE", "INT");
data.put("LENGTH", null);
data.put("NULL", "false");
fields.put("locMaxZ", data);
this.dbDriver.create("#__shops").fields(fields).Finalize();
this.dbDriver.updateQuery();
GiantShop.log.log(Level.INFO, "Shops table successfully created!");
}
if(!this.dbDriver.tableExists("#__items")) {
ArrayList<String> field = new ArrayList<String>();
field.add("tablename");
field.add("version");
HashMap<Integer, HashMap<String, String>> d = new HashMap<Integer, HashMap<String, String>>();
HashMap<String, String> data = new HashMap<String, String>();
data.put("data", "items");
d.put(0, data);
data = new HashMap<String, String>();
data.put("data", "1.0");
d.put(1, data);
this.dbDriver.insert("#__versions", field, d).Finalize();
this.dbDriver.updateQuery();
HashMap<String, HashMap<String, String>> fields = new HashMap<String, HashMap<String, String>>();
data = new HashMap<String, String>();
data.put("TYPE", "INT");
data.put("LENGTH", "3");
data.put("NULL", "false");
data.put("A_INCR", "true");
data.put("P_KEY", "true");
fields.put("id", data);
data = new HashMap<String, String>();
data.put("TYPE", "INT");
data.put("LENGTH", "3");
data.put("NULL", "false");
fields.put("itemID", data);
data = new HashMap<String, String>();
data.put("TYPE", "INT");
data.put("LENGTH", "3");
data.put("DEFAULT", "-1");
fields.put("type", data);
data = new HashMap<String, String>();
data.put("TYPE", "INT");
data.put("LENGTH", "3");
data.put("DEFAULT", "-1");
fields.put("sellFor", data);
data = new HashMap<String, String>();
data.put("TYPE", "INT");
data.put("LENGTH", "3");
data.put("DEFAULT", "-1");
fields.put("buyFor", data);
data = new HashMap<String, String>();
data.put("TYPE", "INT");
data.put("LENGTH", "3");
data.put("DEFAULT", "-1");
fields.put("stock", data);
data = new HashMap<String, String>();
data.put("TYPE", "INT");
data.put("LENGTH", "3");
data.put("DEFAULT", "1");
fields.put("perStack", data);
data = new HashMap<String, String>();
data.put("TYPE", "VARCHAR");
data.put("LENGTH", "100");
data.put("NULL", "true");
fields.put("shops", data);
this.dbDriver.create("#__items").fields(fields).Finalize();
this.dbDriver.updateQuery();
GiantShop.log.log(Level.INFO, "Items table successfully created!");
}
if(!this.dbDriver.tableExists("#__discounts")) {
ArrayList<String> field = new ArrayList<String>();
field.add("tablename");
field.add("version");
HashMap<Integer, HashMap<String, String>> d = new HashMap<Integer, HashMap<String, String>>();
HashMap<String, String> data = new HashMap<String, String>();
data.put("data", "discounts");
d.put(0, data);
data = new HashMap<String, String>();
data.put("data", "1.0");
d.put(1, data);
this.dbDriver.insert("#__versions", field, d).Finalize();
this.dbDriver.updateQuery();
HashMap<String, HashMap<String, String>> fields = new HashMap<String, HashMap<String, String>>();
data = new HashMap<String, String>();
data.put("TYPE", "INT");
data.put("LENGTH", "3");
data.put("NULL", "false");
data.put("A_INCR", "true");
data.put("P_KEY", "true");
fields.put("id", data);
data = new HashMap<String, String>();
data.put("TYPE", "INT");
data.put("LENGTH", "3");
data.put("NULL", "false");
fields.put("itemID", data);
data = new HashMap<String, String>();
data.put("TYPE", "INT");
data.put("LENGTH", "3");
data.put("DEFAULT", "10");
fields.put("discount", data);
data = new HashMap<String, String>();
data.put("TYPE", "VARCHAR");
data.put("LENGTH", "100");
data.put("NULL", "true");
fields.put("user", data);
data = new HashMap<String, String>();
data.put("TYPE", "VARCHAR");
data.put("LENGTH", "100");
data.put("NULL", "true");
fields.put("`group`", data);
this.dbDriver.create("#__discounts").fields(fields).Finalize();
this.dbDriver.updateQuery();
GiantShop.log.log(Level.INFO, "Discounts type table successfully created!");
}
}
| private void init() {
if(!this.dbDriver.tableExists("#__versions")) {
HashMap<String, HashMap<String, String>> fields = new HashMap<String, HashMap<String, String>>();
HashMap<String, String> data = new HashMap<String, String>();
data.put("TYPE", "VARCHAR");
data.put("LENGTH", "100");
data.put("NULL", "false");
fields.put("tableName", data);
data = new HashMap<String, String>();
data.put("TYPE", "DOUBLE");
data.put("LENGTH", null);
data.put("NULL", "false");
data.put("DEFAULT", "1.0");
fields.put("version", data);
this.dbDriver.create("#__versions").fields(fields).Finalize();
this.dbDriver.updateQuery();
GiantShop.log.log(Level.INFO, "Revisions table successfully created!");
}
if(!this.dbDriver.tableExists("#__log")){
ArrayList<String> field = new ArrayList<String>();
field.add("tablename");
field.add("version");
HashMap<Integer, HashMap<String, String>> d = new HashMap<Integer, HashMap<String, String>>();
HashMap<String, String> data = new HashMap<String, String>();
data.put("data", "log");
d.put(0, data);
data = new HashMap<String, String>();
data.put("data", "1.0");
d.put(1, data);
this.dbDriver.insert("#__versions", field, d).Finalize();
this.dbDriver.updateQuery();
HashMap<String, HashMap<String, String>> fields = new HashMap<String, HashMap<String, String>>();
data = new HashMap<String, String>();
data.put("TYPE", "INT");
data.put("LENGTH", "3");
data.put("NULL", "false");
data.put("A_INCR", "true");
data.put("P_KEY", "true");
fields.put("id", data);
data = new HashMap<String, String>();
data.put("TYPE", "INT");
data.put("LENGTH", "3");
data.put("NULL", "true");
fields.put("type", data);
data = new HashMap<String, String>();
data.put("TYPE", "VARCHAR");
data.put("LENGTH", "100");
data.put("NULL", "true");
fields.put("user", data);
data = new HashMap<String, String>();
data.put("TYPE", "VARCHAR");
data.put("LENGTH", "100");
data.put("NULL", "true");
fields.put("data", data);
data = new HashMap<String, String>();
data.put("TYPE", "INT");
data.put("LENGTH", "50");
data.put("NULL", "false");
data.put("DEFAULT", "0");
fields.put("date", data);
this.dbDriver.create("#__log").fields(fields).Finalize();
this.dbDriver.updateQuery();
GiantShop.log.log(Level.INFO, "Logging table successfully created!");
}
if(!this.dbDriver.tableExists("#__shops")) {
ArrayList<String> field = new ArrayList<String>();
field.add("tablename");
field.add("version");
HashMap<Integer, HashMap<String, String>> d = new HashMap<Integer, HashMap<String, String>>();
HashMap<String, String> data = new HashMap<String, String>();
data.put("data", "shops");
d.put(0, data);
data = new HashMap<String, String>();
data.put("data", "1.0");
d.put(1, data);
this.dbDriver.insert("#__versions", field, d).Finalize();
this.dbDriver.updateQuery();
HashMap<String, HashMap<String, String>> fields = new HashMap<String, HashMap<String, String>>();
data = new HashMap<String, String>();
data.put("TYPE", "INT");
data.put("LENGTH", "3");
data.put("NULL", "false");
data.put("A_INCR", "true");
data.put("P_KEY", "true");
fields.put("id", data);
data = new HashMap<String, String>();
data.put("TYPE", "VARCHAR");
data.put("LENGTH", "100");
data.put("NULL", "false");
fields.put("name", data);
data = new HashMap<String, String>();
data.put("TYPE", "VARCHAR");
data.put("LENGTH", "100");
data.put("NULL", "true");
fields.put("world", data);
data = new HashMap<String, String>();
data.put("TYPE", "DOUBLE");
data.put("LENGTH", null);
data.put("NULL", "false");
fields.put("locMinX", data);
data = new HashMap<String, String>();
data.put("TYPE", "DOUBLE");
data.put("LENGTH", null);
data.put("NULL", "false");
fields.put("locMinY", data);
data = new HashMap<String, String>();
data.put("TYPE", "DOUBLE");
data.put("LENGTH", null);
data.put("NULL", "false");
fields.put("locMinZ", data);
data = new HashMap<String, String>();
data.put("TYPE", "DOUBLE");
data.put("LENGTH", null);
data.put("NULL", "false");
fields.put("locMaxX", data);
data = new HashMap<String, String>();
data.put("TYPE", "DOUBLE");
data.put("LENGTH", null);
data.put("NULL", "false");
fields.put("locMaxY", data);
data = new HashMap<String, String>();
data.put("TYPE", "DOUBLE");
data.put("LENGTH", null);
data.put("NULL", "false");
fields.put("locMaxZ", data);
this.dbDriver.create("#__shops").fields(fields).Finalize();
this.dbDriver.updateQuery();
GiantShop.log.log(Level.INFO, "Shops table successfully created!");
}
if(!this.dbDriver.tableExists("#__items")) {
ArrayList<String> field = new ArrayList<String>();
field.add("tablename");
field.add("version");
HashMap<Integer, HashMap<String, String>> d = new HashMap<Integer, HashMap<String, String>>();
HashMap<String, String> data = new HashMap<String, String>();
data.put("data", "items");
d.put(0, data);
data = new HashMap<String, String>();
data.put("data", "1.0");
d.put(1, data);
this.dbDriver.insert("#__versions", field, d).Finalize();
this.dbDriver.updateQuery();
HashMap<String, HashMap<String, String>> fields = new HashMap<String, HashMap<String, String>>();
data = new HashMap<String, String>();
data.put("TYPE", "INT");
data.put("LENGTH", "3");
data.put("NULL", "false");
data.put("A_INCR", "true");
data.put("P_KEY", "true");
fields.put("id", data);
data = new HashMap<String, String>();
data.put("TYPE", "INT");
data.put("LENGTH", "3");
data.put("NULL", "false");
fields.put("itemID", data);
data = new HashMap<String, String>();
data.put("TYPE", "INT");
data.put("LENGTH", "3");
data.put("DEFAULT", "-1");
fields.put("type", data);
data = new HashMap<String, String>();
data.put("TYPE", "DOUBLE");
data.put("LENGTH", null);
data.put("DEFAULT", "-1");
fields.put("sellFor", data);
data = new HashMap<String, String>();
data.put("TYPE", "DOUBLE");
data.put("LENGTH", null);
data.put("DEFAULT", "-1");
fields.put("buyFor", data);
data = new HashMap<String, String>();
data.put("TYPE", "DOUBLE");
data.put("LENGTH", null);
data.put("DEFAULT", "-1");
fields.put("stock", data);
data = new HashMap<String, String>();
data.put("TYPE", "INT");
data.put("LENGTH", "3");
data.put("DEFAULT", "1");
fields.put("perStack", data);
data = new HashMap<String, String>();
data.put("TYPE", "VARCHAR");
data.put("LENGTH", "100");
data.put("NULL", "true");
fields.put("shops", data);
this.dbDriver.create("#__items").fields(fields).Finalize();
this.dbDriver.updateQuery();
GiantShop.log.log(Level.INFO, "Items table successfully created!");
}
if(!this.dbDriver.tableExists("#__discounts")) {
ArrayList<String> field = new ArrayList<String>();
field.add("tablename");
field.add("version");
HashMap<Integer, HashMap<String, String>> d = new HashMap<Integer, HashMap<String, String>>();
HashMap<String, String> data = new HashMap<String, String>();
data.put("data", "discounts");
d.put(0, data);
data = new HashMap<String, String>();
data.put("data", "1.0");
d.put(1, data);
this.dbDriver.insert("#__versions", field, d).Finalize();
this.dbDriver.updateQuery();
HashMap<String, HashMap<String, String>> fields = new HashMap<String, HashMap<String, String>>();
data = new HashMap<String, String>();
data.put("TYPE", "INT");
data.put("LENGTH", "3");
data.put("NULL", "false");
data.put("A_INCR", "true");
data.put("P_KEY", "true");
fields.put("id", data);
data = new HashMap<String, String>();
data.put("TYPE", "INT");
data.put("LENGTH", "3");
data.put("NULL", "false");
fields.put("itemID", data);
data = new HashMap<String, String>();
data.put("TYPE", "INT");
data.put("LENGTH", "3");
data.put("DEFAULT", "10");
fields.put("discount", data);
data = new HashMap<String, String>();
data.put("TYPE", "VARCHAR");
data.put("LENGTH", "100");
data.put("NULL", "true");
fields.put("user", data);
data = new HashMap<String, String>();
data.put("TYPE", "VARCHAR");
data.put("LENGTH", "100");
data.put("NULL", "true");
fields.put("`group`", data);
this.dbDriver.create("#__discounts").fields(fields).Finalize();
this.dbDriver.updateQuery();
GiantShop.log.log(Level.INFO, "Discounts type table successfully created!");
}
}
|
diff --git a/plugins/org.eclipse.birt.report.data.adapter/src/org/eclipse/birt/report/data/adapter/internal/adapter/DataAdapterUtil.java b/plugins/org.eclipse.birt.report.data.adapter/src/org/eclipse/birt/report/data/adapter/internal/adapter/DataAdapterUtil.java
index 93952e981..92c610aad 100644
--- a/plugins/org.eclipse.birt.report.data.adapter/src/org/eclipse/birt/report/data/adapter/internal/adapter/DataAdapterUtil.java
+++ b/plugins/org.eclipse.birt.report.data.adapter/src/org/eclipse/birt/report/data/adapter/internal/adapter/DataAdapterUtil.java
@@ -1,381 +1,383 @@
/*
*************************************************************************
* Copyright (c) 2006 Actuate Corporation.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* Actuate Corporation - initial API and implementation
*
*************************************************************************
*/
package org.eclipse.birt.report.data.adapter.internal.adapter;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import org.eclipse.birt.core.data.DataType;
import org.eclipse.birt.core.data.ExpressionUtil;
import org.eclipse.birt.core.exception.BirtException;
import org.eclipse.birt.data.engine.api.IColumnDefinition;
import org.eclipse.birt.data.engine.api.querydefn.BaseDataSetDesign;
import org.eclipse.birt.data.engine.api.querydefn.BaseDataSourceDesign;
import org.eclipse.birt.data.engine.api.querydefn.ColumnDefinition;
import org.eclipse.birt.report.data.adapter.api.AdapterException;
import org.eclipse.birt.report.data.adapter.i18n.ResourceConstants;
import org.eclipse.birt.report.model.api.ColumnHintHandle;
import org.eclipse.birt.report.model.api.ComputedColumnHandle;
import org.eclipse.birt.report.model.api.DataSetHandle;
import org.eclipse.birt.report.model.api.DataSetParameterHandle;
import org.eclipse.birt.report.model.api.DataSourceHandle;
import org.eclipse.birt.report.model.api.FilterConditionHandle;
import org.eclipse.birt.report.model.api.JointDataSetHandle;
import org.eclipse.birt.report.model.api.OdaDataSetHandle;
import org.eclipse.birt.report.model.api.OdaDataSetParameterHandle;
import org.eclipse.birt.report.model.api.OdaResultSetColumnHandle;
import org.eclipse.birt.report.model.api.ParamBindingHandle;
import org.eclipse.birt.report.model.api.ReportElementHandle;
import org.eclipse.birt.report.model.api.ResultSetColumnHandle;
import org.eclipse.birt.report.model.api.elements.DesignChoiceConstants;
import org.eclipse.birt.report.model.api.metadata.IPropertyDefn;
/**
* Utility class for data adaptors
*/
class DataAdapterUtil
{
/**
* Adapts common base data source properties
*/
public static void adaptBaseDataSource( DataSourceHandle source, BaseDataSourceDesign dest )
{
dest.setBeforeOpenScript( source.getBeforeOpen( ) );
dest.setAfterOpenScript( source.getAfterOpen( ) );
dest.setBeforeCloseScript( source.getBeforeClose( ) );
dest.setAfterCloseScript( source.getAfterClose( ) );
}
/**
* Adapts base data set properties
*/
public static void adaptBaseDataSet( DataSetHandle modelDataSet,
BaseDataSetDesign dteDataSet ) throws BirtException
{
if ( ( !( modelDataSet instanceof JointDataSetHandle ) )
&& modelDataSet.getDataSource( ) == null )
{
throw new AdapterException( ResourceConstants.DATASOURCE_NULL_ERROR );
}
if ( !( modelDataSet instanceof JointDataSetHandle ) )
{
dteDataSet.setDataSource( modelDataSet.getDataSource( )
.getQualifiedName( ) );
dteDataSet.setBeforeOpenScript( modelDataSet.getBeforeOpen( ) );
dteDataSet.setAfterOpenScript( modelDataSet.getAfterOpen( ) );
dteDataSet.setOnFetchScript( modelDataSet.getOnFetch( ) );
dteDataSet.setBeforeCloseScript( modelDataSet.getBeforeClose( ) );
dteDataSet.setAfterCloseScript( modelDataSet.getAfterClose( ) );
}
populateParameter( modelDataSet, dteDataSet );
populateComputedColumn( modelDataSet, dteDataSet );
populateFilter( modelDataSet, dteDataSet );
dteDataSet.setRowFetchLimit( modelDataSet.getRowFetchLimit( ) );
mergeHints( modelDataSet, dteDataSet );
}
/**
*
* @param modelDataSet
* @param dteDataSet
*/
private static void populateParameter( DataSetHandle modelDataSet, BaseDataSetDesign dteDataSet )
{
//dataset parameters definition
HashMap paramBindingCandidates = new HashMap( );
Iterator elmtIter = modelDataSet.parametersIterator( );
if ( elmtIter != null )
{
while ( elmtIter.hasNext( ) )
{
DataSetParameterHandle modelParam = ( DataSetParameterHandle ) elmtIter
.next( );
// collect input parameter default values as
// potential parameter binding if no explicit ones are
// defined for a parameter
if ( modelParam.isInput( ) )
{
String defaultValueExpr = null;
if ( modelParam instanceof OdaDataSetParameterHandle
&& ( (OdaDataSetParameterHandle) modelParam ).getParamName( ) != null )
{
defaultValueExpr = ExpressionUtil.createJSParameterExpression( ( ( (OdaDataSetParameterHandle) modelParam ).getParamName( ) ) );
}
else
defaultValueExpr = modelParam.getDefaultValue( );
- dteDataSet.addParameter( new ParameterAdapter( modelParam ) );
if ( defaultValueExpr != null )
+ {
+ dteDataSet.addParameter( new ParameterAdapter( modelParam ) );
paramBindingCandidates.put( modelParam.getName( ),
new ExpressionAdapter( defaultValueExpr,
org.eclipse.birt.report.data.adapter.api.DataAdapterUtil.modelDataTypeToCoreDataType( modelParam.getDataType( ) ) ) );
+ }
}
else
{
dteDataSet.addParameter( new ParameterAdapter( modelParam ) );
}
}
}
// input parameter bindings
elmtIter = modelDataSet.paramBindingsIterator( );
if ( elmtIter != null )
{
while ( elmtIter.hasNext( ) )
{
ParamBindingHandle modelParamBinding = ( ParamBindingHandle ) elmtIter
.next( );
// replace default value of the same parameter, if defined
if ( modelParamBinding.getExpression( ) != null )
paramBindingCandidates.put( modelParamBinding.getParamName( ),
new ExpressionAdapter( modelParamBinding.getExpression( ),
DataType.ANY_TYPE ) );
}
}
// assign merged parameter bindings to the data set
if ( paramBindingCandidates.size( ) > 0 )
{
elmtIter = paramBindingCandidates.keySet( ).iterator( );
while ( elmtIter.hasNext( ) )
{
Object paramName = elmtIter.next( );
assert ( paramName != null && paramName instanceof String );
ExpressionAdapter expression = ( ExpressionAdapter ) paramBindingCandidates
.get( paramName );
dteDataSet.addInputParamBinding( new InputParamBindingAdapter( (String) paramName,
expression ) );
}
}
}
/**
*
* @param modelDataSet
* @param dteDataSet
* @throws AdapterException
*/
private static void populateComputedColumn( DataSetHandle modelDataSet, BaseDataSetDesign dteDataSet ) throws AdapterException
{
// computed columns
Iterator elmtIter = modelDataSet.computedColumnsIterator( );
if ( elmtIter != null )
{
while ( elmtIter.hasNext( ) )
{
ComputedColumnHandle modelCmptdColumn = ( ComputedColumnHandle ) elmtIter
.next( );
dteDataSet.addComputedColumn( new ComputedColumnAdapter( modelCmptdColumn ));
}
}
}
/**
*
* @param modelDataSet
* @param dteDataSet
*/
private static void populateFilter( DataSetHandle modelDataSet, BaseDataSetDesign dteDataSet )
{
// filter conditions
Iterator elmtIter = modelDataSet.filtersIterator( );
if ( elmtIter != null )
{
while ( elmtIter.hasNext( ) )
{
FilterConditionHandle modelFilter = ( FilterConditionHandle ) elmtIter
.next( );
dteDataSet.addFilter( new FilterAdapter( modelFilter ) );
}
}
}
/**
*
* @param modelDataSet
* @param dteDataSet
*/
private static void mergeHints( DataSetHandle modelDataSet, BaseDataSetDesign dteDataSet )
{
// merge ResultSetHints and ColumnHints, the order is important.
// ResultSetHints will give each column a unique name, and
// column hints should base on the result of ResultSet hint.
// So in ResultSetHint list, the order of items should be
// ResultSetColumn and then ColumnHint.
// now merge model's result set column info into existing columnDefn
// with same column name, otherwise create new columnDefn
// based on the model's result set column
Iterator elmtIter;
if ( modelDataSet instanceof OdaDataSetHandle )
{
elmtIter = modelDataSet.resultSetIterator( );
if ( elmtIter != null )
{
while ( elmtIter.hasNext( ) )
{
OdaResultSetColumnHandle modelColumn = (OdaResultSetColumnHandle) elmtIter.next( );
if ( !modelColumn.getColumnName( )
.equals( modelColumn.getNativeName( ) ) )
dteDataSet.addResultSetHint( new ColumnAdapter( (ResultSetColumnHandle) modelColumn ) );
}
}
}
elmtIter = modelDataSet.resultSetHintsIterator( );
if ( elmtIter != null )
{
while ( elmtIter.hasNext( ) )
{
ResultSetColumnHandle modelColumn = (ResultSetColumnHandle) elmtIter.next( );
dteDataSet.addResultSetHint( new ColumnAdapter( modelColumn ) );
}
}
// merging result set column and column hints into DtE columnDefn;
// first create new columnDefn based on model's column hints
elmtIter = modelDataSet.columnHintsIterator( );
if ( elmtIter != null )
{
List columnDefns = dteDataSet.getResultSetHints( );
while ( elmtIter.hasNext( ) )
{
ColumnHintHandle modelColumnHint = ( ColumnHintHandle ) elmtIter
.next( );
ColumnDefinition existDefn = findColumnDefn( columnDefns,
modelColumnHint.getColumnName( ) );
if ( existDefn != null )
updateColumnDefn( existDefn, modelColumnHint );
else
dteDataSet
.addResultSetHint( new ColumnAdapter( modelColumnHint ) );
}
}
}
public static void updateColumnDefn( ColumnDefinition dteColumn,
ColumnHintHandle modelColumnHint )
{
assert dteColumn.getColumnName( ).equals(
modelColumnHint.getColumnName( ) );
dteColumn.setAlias( modelColumnHint.getAlias( ) );
String exportConstant = modelColumnHint.getExport( );
if ( exportConstant != null )
{
int exportHint = IColumnDefinition.DONOT_EXPORT; // default value
if ( exportConstant
.equals( DesignChoiceConstants.EXPORT_TYPE_IF_REALIZED ) )
exportHint = IColumnDefinition.EXPORT_IF_REALIZED;
else if ( exportConstant
.equals( DesignChoiceConstants.EXPORT_TYPE_ALWAYS ) )
exportHint = IColumnDefinition.ALWAYS_EXPORT;
else
assert exportConstant
.equals( DesignChoiceConstants.EXPORT_TYPE_NONE );
dteColumn.setExportHint( exportHint );
}
String searchConstant = modelColumnHint.getSearching( );
if ( searchConstant != null )
{
int searchHint = IColumnDefinition.NOT_SEARCHABLE;
if ( searchConstant
.equals( DesignChoiceConstants.SEARCH_TYPE_INDEXED ) )
searchHint = IColumnDefinition.SEARCHABLE_IF_INDEXED;
else if ( searchConstant
.equals( DesignChoiceConstants.SEARCH_TYPE_ANY ) )
searchHint = IColumnDefinition.ALWAYS_SEARCHABLE;
else
assert searchConstant
.equals( DesignChoiceConstants.SEARCH_TYPE_NONE );
dteColumn.setSearchHint( searchHint );
}
}
/**
* Find the DtE columnDefn from the given list of columnDefns that matches
* the given columnName.
*/
private static ColumnDefinition findColumnDefn( List columnDefns, String columnName )
{
assert columnName != null;
if ( columnDefns == null )
return null; // no list to find from
Iterator iter = columnDefns.iterator( );
if ( iter == null )
return null;
// iterate thru each columnDefn, and looks for a match of
// specified column name
while ( iter.hasNext( ) )
{
ColumnDefinition column = ( ColumnDefinition ) iter.next( );
if ( columnName.equals( column.getColumnName( ) ) )
return column;
}
return null;
}
/**
* Gets the data handle's static ROM extension properties name and value
* pairs in String values and returns them in a Map
*/
public static Map getExtensionProperties( ReportElementHandle dataHandle,
List driverPropList )
{
if ( driverPropList == null || driverPropList.isEmpty( ) )
return null; // nothing to add
Map properties = new HashMap( );
Iterator elmtIter = driverPropList.iterator( );
while ( elmtIter.hasNext( ) )
{
IPropertyDefn modelExtProp = ( IPropertyDefn ) elmtIter.next( );
// First get extension property's name
String propName = modelExtProp.getName( );
assert ( propName != null && propName.length( ) > 0 );
// Use property name to get property value
Object propValueObj = dataHandle.getProperty( modelExtProp
.getName( ) );
/*
* An ODA consumer does not distinguish whether a property value is
* not set or explicitly set to null. Its handling is pushed down to
* the underlying data provider.
*/
String propValue = ( propValueObj == null ) ? null : propValueObj
.toString( );
properties.put( propName, propValue );
}
return properties;
}
}
| false | true | private static void populateParameter( DataSetHandle modelDataSet, BaseDataSetDesign dteDataSet )
{
//dataset parameters definition
HashMap paramBindingCandidates = new HashMap( );
Iterator elmtIter = modelDataSet.parametersIterator( );
if ( elmtIter != null )
{
while ( elmtIter.hasNext( ) )
{
DataSetParameterHandle modelParam = ( DataSetParameterHandle ) elmtIter
.next( );
// collect input parameter default values as
// potential parameter binding if no explicit ones are
// defined for a parameter
if ( modelParam.isInput( ) )
{
String defaultValueExpr = null;
if ( modelParam instanceof OdaDataSetParameterHandle
&& ( (OdaDataSetParameterHandle) modelParam ).getParamName( ) != null )
{
defaultValueExpr = ExpressionUtil.createJSParameterExpression( ( ( (OdaDataSetParameterHandle) modelParam ).getParamName( ) ) );
}
else
defaultValueExpr = modelParam.getDefaultValue( );
dteDataSet.addParameter( new ParameterAdapter( modelParam ) );
if ( defaultValueExpr != null )
paramBindingCandidates.put( modelParam.getName( ),
new ExpressionAdapter( defaultValueExpr,
org.eclipse.birt.report.data.adapter.api.DataAdapterUtil.modelDataTypeToCoreDataType( modelParam.getDataType( ) ) ) );
}
else
{
dteDataSet.addParameter( new ParameterAdapter( modelParam ) );
}
}
}
// input parameter bindings
elmtIter = modelDataSet.paramBindingsIterator( );
if ( elmtIter != null )
{
while ( elmtIter.hasNext( ) )
{
ParamBindingHandle modelParamBinding = ( ParamBindingHandle ) elmtIter
.next( );
// replace default value of the same parameter, if defined
if ( modelParamBinding.getExpression( ) != null )
paramBindingCandidates.put( modelParamBinding.getParamName( ),
new ExpressionAdapter( modelParamBinding.getExpression( ),
DataType.ANY_TYPE ) );
}
}
// assign merged parameter bindings to the data set
if ( paramBindingCandidates.size( ) > 0 )
{
elmtIter = paramBindingCandidates.keySet( ).iterator( );
while ( elmtIter.hasNext( ) )
{
Object paramName = elmtIter.next( );
assert ( paramName != null && paramName instanceof String );
ExpressionAdapter expression = ( ExpressionAdapter ) paramBindingCandidates
.get( paramName );
dteDataSet.addInputParamBinding( new InputParamBindingAdapter( (String) paramName,
expression ) );
}
}
}
| private static void populateParameter( DataSetHandle modelDataSet, BaseDataSetDesign dteDataSet )
{
//dataset parameters definition
HashMap paramBindingCandidates = new HashMap( );
Iterator elmtIter = modelDataSet.parametersIterator( );
if ( elmtIter != null )
{
while ( elmtIter.hasNext( ) )
{
DataSetParameterHandle modelParam = ( DataSetParameterHandle ) elmtIter
.next( );
// collect input parameter default values as
// potential parameter binding if no explicit ones are
// defined for a parameter
if ( modelParam.isInput( ) )
{
String defaultValueExpr = null;
if ( modelParam instanceof OdaDataSetParameterHandle
&& ( (OdaDataSetParameterHandle) modelParam ).getParamName( ) != null )
{
defaultValueExpr = ExpressionUtil.createJSParameterExpression( ( ( (OdaDataSetParameterHandle) modelParam ).getParamName( ) ) );
}
else
defaultValueExpr = modelParam.getDefaultValue( );
if ( defaultValueExpr != null )
{
dteDataSet.addParameter( new ParameterAdapter( modelParam ) );
paramBindingCandidates.put( modelParam.getName( ),
new ExpressionAdapter( defaultValueExpr,
org.eclipse.birt.report.data.adapter.api.DataAdapterUtil.modelDataTypeToCoreDataType( modelParam.getDataType( ) ) ) );
}
}
else
{
dteDataSet.addParameter( new ParameterAdapter( modelParam ) );
}
}
}
// input parameter bindings
elmtIter = modelDataSet.paramBindingsIterator( );
if ( elmtIter != null )
{
while ( elmtIter.hasNext( ) )
{
ParamBindingHandle modelParamBinding = ( ParamBindingHandle ) elmtIter
.next( );
// replace default value of the same parameter, if defined
if ( modelParamBinding.getExpression( ) != null )
paramBindingCandidates.put( modelParamBinding.getParamName( ),
new ExpressionAdapter( modelParamBinding.getExpression( ),
DataType.ANY_TYPE ) );
}
}
// assign merged parameter bindings to the data set
if ( paramBindingCandidates.size( ) > 0 )
{
elmtIter = paramBindingCandidates.keySet( ).iterator( );
while ( elmtIter.hasNext( ) )
{
Object paramName = elmtIter.next( );
assert ( paramName != null && paramName instanceof String );
ExpressionAdapter expression = ( ExpressionAdapter ) paramBindingCandidates
.get( paramName );
dteDataSet.addInputParamBinding( new InputParamBindingAdapter( (String) paramName,
expression ) );
}
}
}
|
diff --git a/apps/routerconsole/java/src/net/i2p/router/web/ConfigLoggingHandler.java b/apps/routerconsole/java/src/net/i2p/router/web/ConfigLoggingHandler.java
index 772e9879f..f1970e044 100644
--- a/apps/routerconsole/java/src/net/i2p/router/web/ConfigLoggingHandler.java
+++ b/apps/routerconsole/java/src/net/i2p/router/web/ConfigLoggingHandler.java
@@ -1,165 +1,166 @@
package net.i2p.router.web;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.util.Properties;
/**
* Handler to deal with form submissions from the logging config form and act
* upon the values.
*
*/
public class ConfigLoggingHandler extends FormHandler {
private boolean _shouldSave;
private String _levels;
private String _defaultLevel;
private String _filename;
private String _recordFormat;
private String _dateFormat;
private String _fileSize;
private String _newLogClass;
private String _newLogLevel = "WARN";
@Override
protected void processForm() {
if (_shouldSave) {
saveChanges();
} else {
// noop
}
}
public void setShouldsave(String moo) { _shouldSave = true; }
public void setLevels(String levels) {
_levels = (levels != null ? levels.trim() : null);
}
public void setDefaultloglevel(String level) {
_defaultLevel = (level != null ? level.trim() : null);
}
public void setLogfilename(String filename) {
_filename = (filename != null ? filename.trim() : null);
}
public void setLogformat(String format) {
_recordFormat = (format != null ? format.trim() : null);
}
public void setLogdateformat(String format) {
_dateFormat = (format != null ? format.trim() : null);
}
public void setLogfilesize(String size) {
_fileSize = (size != null ? size.trim() : null);
}
/** @since 0.8.1 */
public void setNewlogclass(String s) {
if (s != null && s.length() > 0)
_newLogClass = s;
}
/** @since 0.8.1 */
public void setNewloglevel(String s) {
if (s != null)
_newLogLevel = s;
}
/**
* The user made changes to the config and wants to save them, so
* lets go ahead and do so.
*
*/
private void saveChanges() {
boolean shouldSave = false;
- if (_levels != null || _newLogClass != null) {
+ if ((_levels != null && _levels.length() > 0) || _newLogClass != null) {
try {
Properties props = new Properties();
if (_levels != null)
props.load(new ByteArrayInputStream(_levels.getBytes()));
if (_newLogClass != null)
props.setProperty(_newLogClass, _newLogLevel);
_context.logManager().setLimits(props);
shouldSave = true;
addFormNotice(_("Log overrides updated"));
} catch (IOException ioe) {
// shouldn't ever happen (BAIS shouldnt cause an IOE)
_context.logManager().getLog(ConfigLoggingHandler.class).error("Error reading from the props?", ioe);
addFormError("Error updating the log limits - levels not valid");
}
- } else {
+ } else if (!_context.logManager().getLimits().isEmpty()) {
_context.logManager().setLimits(null);
+ shouldSave = true;
addFormNotice("Log limits cleared");
}
if (_defaultLevel != null) {
String oldDefault = _context.logManager().getDefaultLimit();
if (_defaultLevel.equals(oldDefault)) {
// noop
} else {
shouldSave = true;
_context.logManager().setDefaultLimit(_defaultLevel);
addFormNotice("Default log level updated from " + oldDefault + " to " + _defaultLevel);
}
}
if (_dateFormat != null && !_dateFormat.equals(_context.logManager().getDateFormatPattern())) {
boolean valid = _context.logManager().setDateFormat(_dateFormat);
if (valid) {
shouldSave = true;
addFormNotice("Date format updated");
} else {
addFormError("Specified date format is not valid (" + _dateFormat + ") - not updated");
}
}
if (_fileSize != null) {
int newBytes = _context.logManager().getFileSize(_fileSize);
int oldBytes = _context.logManager().getFileSize();
if (newBytes > 0) {
if (oldBytes != newBytes) {
_context.logManager().setFileSize(newBytes);
shouldSave = true;
addFormNotice("File size updated");
}
} else {
addFormError("Specified file size limit is not valid (" + _fileSize + ") - not updated");
}
}
if ( (_filename != null) && (_filename.trim().length() > 0) ) {
_filename = _filename.trim();
String old = _context.logManager().getBaseLogfilename();
if ( (old != null) && (_filename.equals(old)) ) {
// noop - don't update since its the same
} else {
shouldSave = true;
_context.logManager().setBaseLogfilename(_filename);
addFormNotice("Log file name pattern updated to " + _filename
+ " (note: will not take effect until next rotation)");
}
}
if ( (_recordFormat != null) && (_recordFormat.trim().length() > 0) ) {
_recordFormat = _recordFormat.trim();
String old = new String(_context.logManager().getFormat());
if (_recordFormat.equalsIgnoreCase(old)) {
// noop - no change
} else {
char fmt[] = new char[_recordFormat.length()];
for (int i = 0; i < fmt.length; i++)
fmt[i] = _recordFormat.charAt(i);
_context.logManager().setFormat(fmt);
shouldSave = true;
addFormNotice("Log record format updated");
}
}
if (shouldSave) {
boolean saved = _context.logManager().saveConfig();
if (saved)
addFormNotice(_("Log configuration saved"));
else
addFormError("Error saving the configuration (applied but not saved) - please see the error logs");
}
}
}
| false | true | private void saveChanges() {
boolean shouldSave = false;
if (_levels != null || _newLogClass != null) {
try {
Properties props = new Properties();
if (_levels != null)
props.load(new ByteArrayInputStream(_levels.getBytes()));
if (_newLogClass != null)
props.setProperty(_newLogClass, _newLogLevel);
_context.logManager().setLimits(props);
shouldSave = true;
addFormNotice(_("Log overrides updated"));
} catch (IOException ioe) {
// shouldn't ever happen (BAIS shouldnt cause an IOE)
_context.logManager().getLog(ConfigLoggingHandler.class).error("Error reading from the props?", ioe);
addFormError("Error updating the log limits - levels not valid");
}
} else {
_context.logManager().setLimits(null);
addFormNotice("Log limits cleared");
}
if (_defaultLevel != null) {
String oldDefault = _context.logManager().getDefaultLimit();
if (_defaultLevel.equals(oldDefault)) {
// noop
} else {
shouldSave = true;
_context.logManager().setDefaultLimit(_defaultLevel);
addFormNotice("Default log level updated from " + oldDefault + " to " + _defaultLevel);
}
}
if (_dateFormat != null && !_dateFormat.equals(_context.logManager().getDateFormatPattern())) {
boolean valid = _context.logManager().setDateFormat(_dateFormat);
if (valid) {
shouldSave = true;
addFormNotice("Date format updated");
} else {
addFormError("Specified date format is not valid (" + _dateFormat + ") - not updated");
}
}
if (_fileSize != null) {
int newBytes = _context.logManager().getFileSize(_fileSize);
int oldBytes = _context.logManager().getFileSize();
if (newBytes > 0) {
if (oldBytes != newBytes) {
_context.logManager().setFileSize(newBytes);
shouldSave = true;
addFormNotice("File size updated");
}
} else {
addFormError("Specified file size limit is not valid (" + _fileSize + ") - not updated");
}
}
if ( (_filename != null) && (_filename.trim().length() > 0) ) {
_filename = _filename.trim();
String old = _context.logManager().getBaseLogfilename();
if ( (old != null) && (_filename.equals(old)) ) {
// noop - don't update since its the same
} else {
shouldSave = true;
_context.logManager().setBaseLogfilename(_filename);
addFormNotice("Log file name pattern updated to " + _filename
+ " (note: will not take effect until next rotation)");
}
}
if ( (_recordFormat != null) && (_recordFormat.trim().length() > 0) ) {
_recordFormat = _recordFormat.trim();
String old = new String(_context.logManager().getFormat());
if (_recordFormat.equalsIgnoreCase(old)) {
// noop - no change
} else {
char fmt[] = new char[_recordFormat.length()];
for (int i = 0; i < fmt.length; i++)
fmt[i] = _recordFormat.charAt(i);
_context.logManager().setFormat(fmt);
shouldSave = true;
addFormNotice("Log record format updated");
}
}
if (shouldSave) {
boolean saved = _context.logManager().saveConfig();
if (saved)
addFormNotice(_("Log configuration saved"));
else
addFormError("Error saving the configuration (applied but not saved) - please see the error logs");
}
}
| private void saveChanges() {
boolean shouldSave = false;
if ((_levels != null && _levels.length() > 0) || _newLogClass != null) {
try {
Properties props = new Properties();
if (_levels != null)
props.load(new ByteArrayInputStream(_levels.getBytes()));
if (_newLogClass != null)
props.setProperty(_newLogClass, _newLogLevel);
_context.logManager().setLimits(props);
shouldSave = true;
addFormNotice(_("Log overrides updated"));
} catch (IOException ioe) {
// shouldn't ever happen (BAIS shouldnt cause an IOE)
_context.logManager().getLog(ConfigLoggingHandler.class).error("Error reading from the props?", ioe);
addFormError("Error updating the log limits - levels not valid");
}
} else if (!_context.logManager().getLimits().isEmpty()) {
_context.logManager().setLimits(null);
shouldSave = true;
addFormNotice("Log limits cleared");
}
if (_defaultLevel != null) {
String oldDefault = _context.logManager().getDefaultLimit();
if (_defaultLevel.equals(oldDefault)) {
// noop
} else {
shouldSave = true;
_context.logManager().setDefaultLimit(_defaultLevel);
addFormNotice("Default log level updated from " + oldDefault + " to " + _defaultLevel);
}
}
if (_dateFormat != null && !_dateFormat.equals(_context.logManager().getDateFormatPattern())) {
boolean valid = _context.logManager().setDateFormat(_dateFormat);
if (valid) {
shouldSave = true;
addFormNotice("Date format updated");
} else {
addFormError("Specified date format is not valid (" + _dateFormat + ") - not updated");
}
}
if (_fileSize != null) {
int newBytes = _context.logManager().getFileSize(_fileSize);
int oldBytes = _context.logManager().getFileSize();
if (newBytes > 0) {
if (oldBytes != newBytes) {
_context.logManager().setFileSize(newBytes);
shouldSave = true;
addFormNotice("File size updated");
}
} else {
addFormError("Specified file size limit is not valid (" + _fileSize + ") - not updated");
}
}
if ( (_filename != null) && (_filename.trim().length() > 0) ) {
_filename = _filename.trim();
String old = _context.logManager().getBaseLogfilename();
if ( (old != null) && (_filename.equals(old)) ) {
// noop - don't update since its the same
} else {
shouldSave = true;
_context.logManager().setBaseLogfilename(_filename);
addFormNotice("Log file name pattern updated to " + _filename
+ " (note: will not take effect until next rotation)");
}
}
if ( (_recordFormat != null) && (_recordFormat.trim().length() > 0) ) {
_recordFormat = _recordFormat.trim();
String old = new String(_context.logManager().getFormat());
if (_recordFormat.equalsIgnoreCase(old)) {
// noop - no change
} else {
char fmt[] = new char[_recordFormat.length()];
for (int i = 0; i < fmt.length; i++)
fmt[i] = _recordFormat.charAt(i);
_context.logManager().setFormat(fmt);
shouldSave = true;
addFormNotice("Log record format updated");
}
}
if (shouldSave) {
boolean saved = _context.logManager().saveConfig();
if (saved)
addFormNotice(_("Log configuration saved"));
else
addFormError("Error saving the configuration (applied but not saved) - please see the error logs");
}
}
|
diff --git a/src/main/java/de/taimos/maven_redmine_plugin/model/DateDeserializer.java b/src/main/java/de/taimos/maven_redmine_plugin/model/DateDeserializer.java
index eb02f26..d0a95bb 100644
--- a/src/main/java/de/taimos/maven_redmine_plugin/model/DateDeserializer.java
+++ b/src/main/java/de/taimos/maven_redmine_plugin/model/DateDeserializer.java
@@ -1,63 +1,63 @@
package de.taimos.maven_redmine_plugin.model;
import java.io.IOException;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Date;
import org.codehaus.jackson.JsonParseException;
import org.codehaus.jackson.JsonParser;
import org.codehaus.jackson.JsonProcessingException;
import org.codehaus.jackson.map.DeserializationContext;
import org.codehaus.jackson.map.JsonDeserializer;
/**
* @author thoeger
*
*/
public class DateDeserializer extends JsonDeserializer<Date> {
@Override
public Date deserialize(final JsonParser jp, final DeserializationContext ctx) throws IOException, JsonProcessingException {
final String text = jp.getText();
return DateDeserializer.parse(text);
}
static Date parse(final String text) throws JsonParseException {
// BEWARE THIS IS UGLY CODE STYLE
// Redmine 2.x 2012-01-06T14:43:04Z
Date parsed = DateDeserializer.parseString(text, "yyyy-MM-dd'T'HH:mm:ssZ");
if (parsed == null) {
// Redmine 2.x Date only 2012-01-06
parsed = DateDeserializer.parseString(text, "yyyy-MM-dd");
}
if (parsed == null) {
// Redmine 1.x 2012/10/09 09:29:19 +0200
parsed = DateDeserializer.parseString(text, "yyyy/MM/dd HH:mm:ss Z");
}
if (parsed == null) {
// Redmine 1.x Date only 2012/10/09
- DateDeserializer.parseString(text, "yyyy/MM/dd");
+ parsed = DateDeserializer.parseString(text, "yyyy/MM/dd");
}
if (parsed == null) {
throw new RuntimeException("Cannot parse date");
}
return parsed;
}
private static Date parseString(String s, String pattern) {
try {
final SimpleDateFormat sdf = new SimpleDateFormat(pattern);
return sdf.parse(s);
} catch (final ParseException e) {
// cannot parse date so we try other format
}
return null;
}
}
| true | true | static Date parse(final String text) throws JsonParseException {
// BEWARE THIS IS UGLY CODE STYLE
// Redmine 2.x 2012-01-06T14:43:04Z
Date parsed = DateDeserializer.parseString(text, "yyyy-MM-dd'T'HH:mm:ssZ");
if (parsed == null) {
// Redmine 2.x Date only 2012-01-06
parsed = DateDeserializer.parseString(text, "yyyy-MM-dd");
}
if (parsed == null) {
// Redmine 1.x 2012/10/09 09:29:19 +0200
parsed = DateDeserializer.parseString(text, "yyyy/MM/dd HH:mm:ss Z");
}
if (parsed == null) {
// Redmine 1.x Date only 2012/10/09
DateDeserializer.parseString(text, "yyyy/MM/dd");
}
if (parsed == null) {
throw new RuntimeException("Cannot parse date");
}
return parsed;
}
| static Date parse(final String text) throws JsonParseException {
// BEWARE THIS IS UGLY CODE STYLE
// Redmine 2.x 2012-01-06T14:43:04Z
Date parsed = DateDeserializer.parseString(text, "yyyy-MM-dd'T'HH:mm:ssZ");
if (parsed == null) {
// Redmine 2.x Date only 2012-01-06
parsed = DateDeserializer.parseString(text, "yyyy-MM-dd");
}
if (parsed == null) {
// Redmine 1.x 2012/10/09 09:29:19 +0200
parsed = DateDeserializer.parseString(text, "yyyy/MM/dd HH:mm:ss Z");
}
if (parsed == null) {
// Redmine 1.x Date only 2012/10/09
parsed = DateDeserializer.parseString(text, "yyyy/MM/dd");
}
if (parsed == null) {
throw new RuntimeException("Cannot parse date");
}
return parsed;
}
|
diff --git a/src/share/org/dianexus/triceps/TricepsServlet.java b/src/share/org/dianexus/triceps/TricepsServlet.java
index e43e09b..3d6a872 100644
--- a/src/share/org/dianexus/triceps/TricepsServlet.java
+++ b/src/share/org/dianexus/triceps/TricepsServlet.java
@@ -1,756 +1,763 @@
import java.util.*;
import java.io.*;
import javax.servlet.*;
import javax.servlet.http.*;
import java.net.*;
/**
* This is the central engine that iterates through the nodes
* in a schedule producing, e.g., an interview. It also organizes
* the connection to the display. In the first version, this is
* an http response as defined in the JSDK.
*/
public class TricepsServlet extends HttpServlet {
private Triceps triceps;
private HttpServletRequest req;
private HttpServletResponse res;
private PrintWriter out;
private String firstFocus = null;
private String scheduleList = "";
private String scheduleSrcDir = "";
private String workingFilesDir = "";
private String completedFilesDir = "";
/* hidden variables */
private boolean debug = false;
private boolean developerMode = false;
private boolean refuseToAnswerCurrent = false;
private boolean showQuestionNum = false;
private String directive = null; // the default
/**
* This method runs only when the servlet is first loaded by the
* webserver. It calls the loadSchedule method to input all the
* nodes into memory. The Schedule is then available to all
* sessions that might be running.
*/
public void init(ServletConfig config) throws ServletException {
super.init(config);
String s;
s = config.getInitParameter("scheduleList");
if (s != null && !s.trim().equals(""))
scheduleList = s.trim();
s = config.getInitParameter("scheduleSrcDir");
if (s != null && !s.trim().equals(""))
scheduleSrcDir = s.trim();
s = config.getInitParameter("workingFilesDir");
if (s != null && !s.trim().equals(""))
workingFilesDir = s.trim();
s = config.getInitParameter("completedFilesDir");
if (s != null && !s.trim().equals(""))
completedFilesDir = s.trim();
}
public void destroy() {
super.destroy();
}
/**
* This method is invoked when an initial URL request is made to the servlet.
* It initializes a session and prepares a response to the client that will
* invoke the POST method on further requests.
*/
public void doGet(HttpServletRequest req, HttpServletResponse res) throws ServletException, IOException {
doPost(req,res);
}
/**
* This method is invoked when the servlet is requested with POST variables. This is
* the case after the first request, handled by doGet(), and all further requests.
*/
public void doPost(HttpServletRequest req, HttpServletResponse res) throws ServletException, IOException {
this.req = req;
this.res = res;
HttpSession session = req.getSession(true);
String form = null;
String hiddenStr = "";
firstFocus = null; // reset it each time
triceps = (Triceps) session.getValue("triceps");
res.setContentType("text/html");
directive = req.getParameter("directive"); // XXX: directive must be set before calling processHidden
hiddenStr = processHidden();
/* Process the form */
try {
form = processDirective();
}
catch (Exception e) {
System.out.println(e.getMessage());
e.printStackTrace();
}
out = res.getWriter();
out.println(header());
out.println(getCustomHeader());
if (form != null) {
out.println("<FORM method='POST' name='myForm' action='" + HttpUtils.getRequestURL(req) + "'>\n");
out.println(hiddenStr);
out.println(form);
out.println("</FORM>\n");
}
out.println(footer());
/* Store appropriate stuff in the session */
if (triceps != null)
session.putValue("triceps", triceps);
out.flush();
// out.close(); // XXX: causes "Network Connection reset by peer" with Ham-D.txt - WHY? Without close, dangling resources?
}
private String processHidden() {
StringBuffer sb = new StringBuffer();
if ("on".equals(req.getParameter("DEBUG"))) {
debug = true;
}
else
debug = false;
if ("on".equals(req.getParameter("developerMode"))) {
developerMode = true;
}
else
developerMode = false;
/* XXX: Kludge - put in HIDDEN if developerMode && debug = false (else lose its state)
This must follow assessement of developerMode && debug, else lose state */
if ("on".equals(req.getParameter("showQuestionNum"))) {
showQuestionNum = true;
if (!debug && !developerMode) {
sb.append("<input type='HIDDEN' name='showQuestionNum' value='on'>\n");
}
}
else
showQuestionNum = false;
String attemptingToRefuse = null;
refuseToAnswerCurrent = false; // the default value
if (triceps != null) {
/* XXX: Refusals only apply once Triceps has been initialized */
attemptingToRefuse = req.getParameter("passwordForRefused");
if (attemptingToRefuse != null && !attemptingToRefuse.trim().equals("")) {
/* if try to enter a password, make sure that doesn't reset the form if password fails */
directive = "next"; // XXX - since JavaScript can't set a SUBMIT value in the subjectRefusesToAnswer() function
if (triceps.getPasswordForRefused() == null) {
sb.append("You are not allowed to refuse to answer these questions<BR>");
}
else {
if (triceps.getPasswordForRefused().equals(attemptingToRefuse)) {
refuseToAnswerCurrent = true;
}
else {
sb.append("Incorrect password to refuse to answer these questions<BR>");
}
}
}
}
sb.append("<input type='HIDDEN' name='passwordForRefused' value=''>\n"); // must manually bypass each time
return sb.toString();
}
private String header() {
StringBuffer sb = new StringBuffer();
sb.append("<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 3.2//EN\">\n");
sb.append("<html>\n");
sb.append("<body bgcolor='white'");
if (firstFocus != null) {
sb.append(" onload='javascript:document.myForm." + firstFocus + ".focus()'");
}
sb.append(">\n");
sb.append("<head>\n");
sb.append("<META HTTP-EQUIV='Content-Type' CONTENT='text/html;CHARSET=iso-8859-1'>\n");
sb.append("<title>" + ((triceps == null) ? "TRICEPS SYSTEM" : triceps.getTitle()) + "</title>\n");
sb.append("</head>\n");
sb.append("<body>\n");
sb.append("<SCRIPT>\n");
sb.append("<!--\n");
sb.append("function subjectRefusesToAnswer() {\n");
sb.append(" var ans = prompt('Enter the password to refuse to answer this question','');\n");
sb.append(" if (ans == null) { return; /* to avoid submit */ }\n");
sb.append(" document.myForm.passwordForRefused.value = ans;\n");
sb.append(" document.myForm.submit();\n");
sb.append("} //-->\n");
sb.append("</SCRIPT>\n");
return sb.toString();
}
private String getCustomHeader() {
StringBuffer sb = new StringBuffer();
sb.append("<TABLE BORDER='0' CELLPADDING='0' CELLSPACING='3' WIDTH='100%'>\n");
sb.append("<TR>\n");
sb.append(" <TD WIDTH='18%'>\n");
sb.append(" <A HREF='javascript:subjectRefusesToAnswer();'>\n");
sb.append(" <IMG SRC='file:///C|/cic/images/ciclogo.gif' ALIGN='BOTTOM' BORDER='0' ALT='Children In the Community'>\n");
sb.append(" </A>\n");
sb.append(" </TD>\n");
sb.append(" <TD WIDTH='82%'><FONT SIZE='5'><B>Transitions Study</B></FONT>\n");
sb.append("</TR>\n");
sb.append("</TABLE>\n");
sb.append("<HR>\n");
return sb.toString();
}
private String getCustomFooter() {
return "";
}
private String footer() {
StringBuffer sb = new StringBuffer();
sb.append(getCustomFooter());
sb.append("</body>\n");
sb.append("</html>\n");
return sb.toString();
}
private String processDirective() {
boolean ok = true;
int gotoMsg = Triceps.OK;
StringBuffer sb = new StringBuffer();
StringBuffer schedules = new StringBuffer();
StringBuffer suspendedInterviews = new StringBuffer();
// get the POSTed directive (start, back, next, help, suspend, etc.) - default is opening screen
if (directive == null || "select new interview".equals(directive)) {
/* read list of available schedules from file */
BufferedReader br = Triceps.getReader(scheduleList, scheduleSrcDir);
if (br == null) {
sb.append("<B>Unable to find '" + scheduleList + "'</B><HR>");
}
else {
try {
int count = 0;
int line=0;
String fileLine;
String src;
while ((fileLine = br.readLine()) != null) {
++line;
if (fileLine.startsWith("COMMENT"))
continue;
try {
StringTokenizer schedule = new StringTokenizer(fileLine,"\t");
String title = schedule.nextToken();
String fileLoc = schedule.nextToken();
if (title == null || fileLoc == null)
continue;
/* Test whether these files exist */
Reader target = Triceps.getReader(fileLoc,scheduleSrcDir);
if (target == null) {
sb.append("Unable to access file '" + fileLoc + "'");
}
else {
try { target.close(); } catch (Exception e) {}
++count;
schedules.append(" <option value='" + Node.encodeHTML(fileLoc) + "'>" + Node.encodeHTML(title) + "</option>\n");
}
}
catch (NullPointerException e) {
sb.append("Error tokenizing schedule list '" + scheduleList + "' on line " + line + ": " + e);
}
catch (NoSuchElementException e) {
sb.append("Error tokenizing schedule list '" + scheduleList + "' on line " + line + ": " + e);
}
catch (Throwable t) {}
}
}
catch(IOException e) {
sb.append("Error reading from " + scheduleList);
}
catch (Throwable t) {}
finally {
if (br != null) {
try { br.close(); } catch (Throwable t) { }
}
}
/* Now build the list of uncompleted interviews */
try {
File dir = new File(workingFilesDir);
if (dir.isDirectory() && dir.canRead()) {
String[] files = dir.list();
int count=0;
for (int i=0;i<files.length;++i) {
try {
File f = new File(files[i]);
if (!f.isDirectory()) {
if (count == 0) {
suspendedInterviews.append("<select name='RestoreSuspended'>\n <option value=''></option>\n");
}
suspendedInterviews.append(" <option value='" + files[i] + "'>" + files[i] + "</option>\n");
++count;
}
}
catch (Throwable t) {
System.out.println(t.getMessage());
}
}
if (count > 0) {
suspendedInterviews.append("</select><BR>");
}
else {
suspendedInterviews.append(" ");
}
}
else {
System.out.println("can't read from dir " + dir.toString());
}
}
catch(Throwable t) {
System.out.println(t.getMessage());
}
}
/* Now construct splash screen */
sb.append("<TABLE CELLPADDING='2' CELLSPACING='2' BORDER='1'>\n");
sb.append("<TR><TD>Please select an interview/questionnaire from the pull-down list: </TD>\n");
sb.append(" <TD><select name='schedule'>\n");
sb.append(schedules);
sb.append(" </select></TD>\n");
sb.append(" <TD><input type='SUBMIT' name='directive' value='START'></TD>\n");
sb.append("</TR>\n");
sb.append("<TR><TD>OR, restore an interview/questionnaire in progress: </TD>\n");
sb.append(" <TD>" + suspendedInterviews +
((developerMode) ? "<input type='text' name='RESTORE'>" : "") +
"</TD>\n");
sb.append(" <TD><input type='SUBMIT' name='directive' value='RESTORE'></TD>\n");
sb.append(showOptions());
sb.append("</TABLE>\n");
return sb.toString();
}
else if (directive.equals("START")) {
// load schedule
triceps = new Triceps(scheduleSrcDir, workingFilesDir, completedFilesDir);
ok = triceps.setSchedule(req.getParameter("schedule"),scheduleSrcDir);
if (!ok) {
try {
this.doGet(req,res);
}
catch (ServletException e) {
}
catch (IOException e) {
}
return sb.toString();
}
ok = ok && ((gotoMsg = triceps.gotoStarting()) == Triceps.OK); // don't proceed if prior error
// ask question
}
else if (directive.equals("RESTORE")) {
String restore;
restore = req.getParameter("RESTORE");
if (restore == null || restore.trim().equals("")) {
restore = req.getParameter("RestoreSuspended");
}
// load schedule
triceps = new Triceps(scheduleSrcDir, workingFilesDir, completedFilesDir);
ok = triceps.setSchedule(restore,workingFilesDir);
if (!ok) {
directive = null; // so that processDirective() will select new interview
return "<B>Unable to find or access schedule '" + restore + "'</B><HR>" +
processDirective();
}
ok = ok && ((gotoMsg = triceps.gotoStarting()) == Triceps.OK); // don't proceed if prior error
// ask question
}
else if (directive.equals("jump to:")) {
gotoMsg = triceps.gotoNode(req.getParameter("jump to:"));
ok = (gotoMsg == Triceps.OK);
// ask this question
}
else if (directive.equals("restart (clean)")) { // restart from scratch
ok = triceps.resetEvidence();
ok = ok && ((gotoMsg = triceps.gotoFirst()) == Triceps.OK); // don't proceed if prior error
// ask first question
}
else if (directive.equals("reload questions")) { // debugging option
ok = triceps.reloadSchedule();
if (ok) {
sb.append("<B>Schedule restored successfully</B><HR>\n");
}
// re-ask current question
}
else if (directive.equals("save to:")) {
String name = req.getParameter("save to:");
String file = workingFilesDir + name;
ok = triceps.toTSV(file);
if (ok) {
sb.append("<B>Interview saved successfully as " + Node.encodeHTML(name) + " (" + Node.encodeHTML(file) + ")</B><HR>\n");
}
}
else if (directive.equals("evaluate expr:")) {
String expr = req.getParameter("evaluate expr:");
if (expr != null) {
Datum datum = triceps.parser.parse(triceps.evidence, expr);
sb.append("<TABLE WIDTH='100%' CELLPADDING='2' CELLSPACING='1' BORDER=1>\n");
sb.append("<TR><TD>Equation</TD><TD><B>" + Node.encodeHTML(expr) + "</B></TD><TD>Type</TD><TD><B>" + Datum.TYPES[datum.type()] + "</B></TD></TR>\n");
sb.append("<TR><TD>String</TD><TD><B>" + Node.encodeHTML(datum.stringVal(true)) + "</B></TD><TD>boolean</TD><TD><B>" + datum.booleanVal() + "</B></TD></TR>\n");
sb.append("<TR><TD>double</TD><TD><B>" + datum.doubleVal() + "</B></TD><TD>long</TD><TD><B>" + datum.longVal() + "</B></TD></TR>\n");
sb.append("<TR><TD>date</TD><TD><B>" + datum.dateVal() + "</B></TD><TD>month</TD><TD><B>" + datum.monthVal() + "</B></TD></TR>\n");
sb.append("</TABLE>\n");
if (triceps.parser.hasErrors()) {
Vector v = triceps.parser.getErrors();
sb.append("<B>There were errors parsing that equation:</B><BR>");
for (int j=0;j<v.size();++j) {
if (j > 0)
sb.append("<BR>");
sb.append(Node.encodeHTML((String) v.elementAt(j)));
}
}
}
}
else if (directive.equals("show XML")) {
sb.append("<B>Use 'Show Source' to see data in Schedule as XML</B><BR>\n");
sb.append("<!--\n" + triceps.toXML() + "\n-->\n");
sb.append("<HR>\n");
}
else if (directive.equals("show Errors")) {
Vector pes = triceps.collectParseErrors();
if (pes.size() == 0) {
sb.append("<B>No errors were found</B><HR>");
}
else {
Vector errs;
for (int i=0;i<pes.size();++i) {
ParseError pe = (ParseError) pes.elementAt(i);
Node n = pe.getNode();
if (i == 0) {
sb.append("<FONT color='red'>The following errors were found in file <B>" + Node.encodeHTML(n.getSourceFile()) + "</B></FONT><BR>\n");
sb.append("<TABLE CELLPADDING='2' CELLSPACING='1' WIDTH='100%' border='1'>\n");
sb.append("<TR><TD>line#</TD><TD>name</TD><TD>Dependencies</TD><TD><B>Dependency Errors</B></TD><TD>Action Type</TD><TD>Action</TD><TD><B>Action Errors</B></TD><TD><B>Other Errors</B></TD></TR>\n");
}
sb.append("\n<TR><TD>" + n.getSourceLine() + "</TD><TD>" + Node.encodeHTML(n.getQuestionRef(),true) + "</TD>");
sb.append("\n<TD>" + Node.encodeHTML(pe.getDependencies(),true) + "</TD>\n<TD>");
errs = pe.getDependenciesErrors();
if (errs.size() == 0) {
sb.append(" ");
}
else {
for (int j=0;j<errs.size();++j) {
if (j > 0)
sb.append("<BR>");
sb.append("" + (j+1) + ") " + Node.encodeHTML((String) errs.elementAt(j),true));
}
}
sb.append("</TD>\n<TD>" + Node.ACTION_TYPES[n.getActionType()] + "</TD><TD>" + Node.encodeHTML(pe.getAction(),true) + "</TD><TD>");
errs = pe.getActionErrors();
if (errs.size() == 0) {
sb.append(" ");
}
else {
for (int j=0;j<errs.size();++j) {
if (j > 0)
sb.append("<BR>");
sb.append("" + (j+1) + ") " + Node.encodeHTML((String) errs.elementAt(j),true));
}
}
sb.append("</TD>\n<TD>");
errs = pe.getNodeErrors();
if (errs.size() == 0) {
sb.append(" ");
}
else {
for (int j=0;j<errs.size();++j) {
if (j > 0)
sb.append("<BR>");
sb.append("" + (j+1) + ") " + (String) errs.elementAt(j)); // XXX: don't Node.encodeHTML() these, since pre-processed within Node
}
}
sb.append("</TD></TR>");
}
sb.append("</TABLE><HR>\n");
}
}
else if (directive.equals("next")) {
// store current answer(s)
Enumeration questionNames = triceps.getQuestions();
while(questionNames.hasMoreElements()) {
Node q = (Node) questionNames.nextElement();
boolean status;
status = triceps.storeValue(q, req.getParameter(q.getName()),refuseToAnswerCurrent);
ok = status && ok;
}
// goto next
ok = ok && ((gotoMsg = triceps.gotoNext()) == Triceps.OK); // don't proceed if prior errors - e.g. unanswered questions
if (gotoMsg == Triceps.AT_END) {
// save the file, but still give the option to go back and change answers
boolean savedOK;
String name = Datum.format(triceps.getStartTime(),Datum.DATE,Datum.TIME_MASK);
String file = completedFilesDir + name;
sb.append("<B>Thank you, the interview is completed</B><BR>\n");
savedOK = triceps.toTSV(file);
ok = savedOK && ok;
if (savedOK) {
sb.append("<B>Interview saved successfully as " + Node.encodeHTML(name) + " (" + Node.encodeHTML(file) + ")</B><HR>\n");
}
}
// don't goto next if errors
// ask question
}
else if (directive.equals("previous")) {
// don't store current
// goto previous
gotoMsg = triceps.gotoPrevious();
ok = ok && (gotoMsg == Triceps.OK);
// ask question
}
- if (!ok) {
+// if (!ok)
+ {
+ /* should do this regardless of OK status? Might catch interesting parsing errors? */
+ int errCount = 0;
Enumeration errs = triceps.getErrors();
if (errs.hasMoreElements()) {
while (errs.hasMoreElements()) {
+ ++errCount;
sb.append("<B>" + Node.encodeHTML((String) errs.nextElement()) + "</B><BR>\n");
}
}
Enumeration nodes = triceps.getQuestions();
while (nodes.hasMoreElements()) {
Node n = (Node) nodes.nextElement();
if (n.hasRuntimeErrors()) {
+ ++errCount;
sb.append("<B>Please answer the question(s) listed in <FONT color='red'>RED</FONT> before proceeding</B><BR>\n");
firstFocus = Node.encodeHTML(n.getName());
break;
}
}
- sb.append("<HR>\n");
+ if (errCount > 0) {
+ sb.append("<HR>\n");
+ }
}
if (firstFocus == null) {
Enumeration nodes = triceps.getQuestions();
while (nodes.hasMoreElements()) {
Node n = (Node) nodes.nextElement();
if (n.focusable()) {
firstFocus = Node.encodeHTML(n.getName());
break;
}
}
}
sb.append(queryUser());
return sb.toString();
}
/**
* This method assembles the displayed question and answer options
* and formats them in HTML for return to the client browser.
*/
private String queryUser() {
// if parser internal to Schedule, should have method access it, not directly
StringBuffer sb = new StringBuffer();
if (debug) {
sb.append("<H4>QUESTION AREA</H4>\n");
}
Enumeration questionNames = triceps.getQuestions();
String color;
String errMsg;
sb.append("<TABLE CELLPADDING='2' CELLSPACING='1' WIDTH='100%' border='1'>\n");
for(int count=0;questionNames.hasMoreElements();++count) {
Node node = (Node) questionNames.nextElement();
Datum datum = triceps.getDatum(node);
if (node.hasRuntimeErrors()) {
color = " color='red'";
StringBuffer errStr = new StringBuffer("<FONT color='red'>");
Vector errs = node.getRuntimeErrors();
for (int j=0;j<errs.size();++j) {
if (j > 0) {
errStr.append("<BR>\n");
}
errStr.append(Node.encodeHTML((String) errs.elementAt(j)));
}
errStr.append("</FONT>");
errMsg = errStr.toString();
}
else {
color = "";
errMsg = "";
}
sb.append(" <TR>\n");
if (showQuestionNum) {
sb.append("<TD><FONT" + color + "><B>" + Node.encodeHTML(node.getQuestionRef()) + "</FONT></B></TD>\n");
}
switch(node.getAnswerType()) {
case Node.NOTHING:
sb.append(" <TD COLSPAN='2'><FONT" + color + ">" + Node.encodeHTML(triceps.getQuestionStr(node)) + "</FONT></TD>\n");
break;
case Node.RADIO_HORIZONTAL:
sb.append(" <TD COLSPAN='2'><FONT" + color + ">" + Node.encodeHTML(triceps.getQuestionStr(node)) + "</FONT></TD>\n");
sb.append("</TR>\n<TR>\n");
if (showQuestionNum) {
sb.append("<TD> </TD>");
}
sb.append(node.prepareChoicesAsHTML(datum,errMsg));
break;
default:
sb.append(" <TD><FONT" + color + ">" + Node.encodeHTML(triceps.getQuestionStr(node)) + "</FONT></TD>\n");
sb.append(" <TD>" + node.prepareChoicesAsHTML(datum) + errMsg + "</TD>\n");
break;
}
sb.append(" </TR>\n");
}
sb.append(" <TR><TD COLSPAN='" + ((showQuestionNum) ? 3 : 2 ) + "' ALIGN='center'>\n");
sb.append("<input type='SUBMIT' name='directive' value='next'>\n");
sb.append("<input type='SUBMIT' name='directive' value='previous'>");
sb.append(" </TD></TR>\n");
if (developerMode || debug) {
sb.append(" <TR><TD COLSPAN='" + ((showQuestionNum) ? 3 : 2 ) + "' ALIGN='center'>\n");
sb.append("<input type='SUBMIT' name='directive' value='select new interview'>\n");
sb.append("<input type='SUBMIT' name='directive' value='restart (clean)'>\n");
sb.append("<input type='SUBMIT' name='directive' value='jump to:' size='10'>\n");
sb.append("<input type='text' name='jump to:'>\n");
sb.append("<input type='SUBMIT' name='directive' value='save to:'>\n");
sb.append("<input type='text' name='save to:'>\n");
sb.append(" </TD></TR>\n");
sb.append(" <TR><TD COLSPAN='" + ((showQuestionNum) ? 3 : 2 ) + "' ALIGN='center'>\n");
sb.append("<input type='SUBMIT' name='directive' value='reload questions'>\n");
sb.append("<input type='SUBMIT' name='directive' value='show Errors'>\n");
sb.append("<input type='SUBMIT' name='directive' value='show XML'>\n");
sb.append("<input type='SUBMIT' name='directive' value='evaluate expr:'>\n");
sb.append("<input type='text' name='evaluate expr:'>\n");
sb.append(" </TD></TR>\n");
}
sb.append(showOptions());
sb.append("</TABLE>\n");
// Complete printout of what's been collected per node
if (debug) {
sb.append("<hr>\n");
sb.append("<H4>CURRENT QUESTION(s)</H4>\n");
sb.append("<TABLE CELLPADDING='2' CELLSPACING='1' WIDTH='100%' BORDER='1'>\n");
questionNames = triceps.getQuestions();
while(questionNames.hasMoreElements()) {
Node n = (Node) questionNames.nextElement();
sb.append("<TR>" +
"<TD>" + Node.encodeHTML(n.getQuestionRef(),true) + "</TD>" +
"<TD><B>" + Node.encodeHTML(triceps.toString(n,true),true) + "</B></TD>" +
"<TD>" + Datum.TYPES[n.getDatumType()] + "</TD>" +
"<TD>" + Node.encodeHTML(n.getName(),true) + "</TD>" +
"<TD>" + Node.encodeHTML(n.getConcept(),true) + "</TD>" +
"<TD>" + Node.encodeHTML(n.getDependencies(),true) + "</TD>" +
"<TD>" + Node.encodeHTML(n.getActionTypeField(),true) + "</TD>" +
"<TD>" + Node.encodeHTML(n.getAction(),true) + "</TD>" +
"</TR>\n");
}
sb.append("</TABLE>\n");
sb.append("<hr>\n");
sb.append("<H4>EVIDENCE AREA</H4>\n");
sb.append("<TABLE CELLPADDING='2' CELLSPACING='1' WIDTH='100%' BORDER='1'>\n");
for (int i = triceps.size()-1; i >= 0; i--) {
Node n = triceps.getNode(i);
if (!triceps.isSet(n))
continue;
sb.append("<TR>" +
"<TD>" + (i + 1) + "</TD>" +
"<TD>" + Node.encodeHTML(n.getQuestionRef(),true) + "</TD>" +
"<TD><B>" + Node.encodeHTML(triceps.toString(n,true),true) + "</B></TD>" +
"<TD>" + Datum.TYPES[n.getDatumType()] + "</TD>" +
"<TD>" + Node.encodeHTML(n.getName(),true) + "</TD>" +
"<TD>" + Node.encodeHTML(n.getConcept(),true) + "</TD>" +
"<TD>" + Node.encodeHTML(n.getDependencies(),true) + "</TD>" +
"<TD>" + Node.encodeHTML(n.getActionTypeField(),true) + "</TD>" +
"<TD>" + Node.encodeHTML(n.getAction(),true) + "</TD>" +
"</TR>\n");
}
sb.append("</TABLE>\n");
}
return sb.toString();
}
private String showOptions() {
if (developerMode || debug) {
StringBuffer sb = new StringBuffer();
sb.append(" <TR><TD COLSPAN='" + ((showQuestionNum) ? 3 : 2 ) + "' ALIGN='center'>\n");
sb.append(" developerMode<input type='checkbox' name='developerMode' value='on'" + ((developerMode) ? " CHECKED" : "") + ">\n");
sb.append(" showQuestionNum<input type='checkbox' name='showQuestionNum' value='on'" + ((showQuestionNum) ? " CHECKED" : "") + ">\n");
sb.append(" debug<input type='checkbox' name='DEBUG' value='on'" + ((debug) ? " CHECKED" : "") + ">\n");
sb.append("</TD></TR>\n");
return sb.toString();
}
else
return "";
}
}
| false | true | private String processDirective() {
boolean ok = true;
int gotoMsg = Triceps.OK;
StringBuffer sb = new StringBuffer();
StringBuffer schedules = new StringBuffer();
StringBuffer suspendedInterviews = new StringBuffer();
// get the POSTed directive (start, back, next, help, suspend, etc.) - default is opening screen
if (directive == null || "select new interview".equals(directive)) {
/* read list of available schedules from file */
BufferedReader br = Triceps.getReader(scheduleList, scheduleSrcDir);
if (br == null) {
sb.append("<B>Unable to find '" + scheduleList + "'</B><HR>");
}
else {
try {
int count = 0;
int line=0;
String fileLine;
String src;
while ((fileLine = br.readLine()) != null) {
++line;
if (fileLine.startsWith("COMMENT"))
continue;
try {
StringTokenizer schedule = new StringTokenizer(fileLine,"\t");
String title = schedule.nextToken();
String fileLoc = schedule.nextToken();
if (title == null || fileLoc == null)
continue;
/* Test whether these files exist */
Reader target = Triceps.getReader(fileLoc,scheduleSrcDir);
if (target == null) {
sb.append("Unable to access file '" + fileLoc + "'");
}
else {
try { target.close(); } catch (Exception e) {}
++count;
schedules.append(" <option value='" + Node.encodeHTML(fileLoc) + "'>" + Node.encodeHTML(title) + "</option>\n");
}
}
catch (NullPointerException e) {
sb.append("Error tokenizing schedule list '" + scheduleList + "' on line " + line + ": " + e);
}
catch (NoSuchElementException e) {
sb.append("Error tokenizing schedule list '" + scheduleList + "' on line " + line + ": " + e);
}
catch (Throwable t) {}
}
}
catch(IOException e) {
sb.append("Error reading from " + scheduleList);
}
catch (Throwable t) {}
finally {
if (br != null) {
try { br.close(); } catch (Throwable t) { }
}
}
/* Now build the list of uncompleted interviews */
try {
File dir = new File(workingFilesDir);
if (dir.isDirectory() && dir.canRead()) {
String[] files = dir.list();
int count=0;
for (int i=0;i<files.length;++i) {
try {
File f = new File(files[i]);
if (!f.isDirectory()) {
if (count == 0) {
suspendedInterviews.append("<select name='RestoreSuspended'>\n <option value=''></option>\n");
}
suspendedInterviews.append(" <option value='" + files[i] + "'>" + files[i] + "</option>\n");
++count;
}
}
catch (Throwable t) {
System.out.println(t.getMessage());
}
}
if (count > 0) {
suspendedInterviews.append("</select><BR>");
}
else {
suspendedInterviews.append(" ");
}
}
else {
System.out.println("can't read from dir " + dir.toString());
}
}
catch(Throwable t) {
System.out.println(t.getMessage());
}
}
/* Now construct splash screen */
sb.append("<TABLE CELLPADDING='2' CELLSPACING='2' BORDER='1'>\n");
sb.append("<TR><TD>Please select an interview/questionnaire from the pull-down list: </TD>\n");
sb.append(" <TD><select name='schedule'>\n");
sb.append(schedules);
sb.append(" </select></TD>\n");
sb.append(" <TD><input type='SUBMIT' name='directive' value='START'></TD>\n");
sb.append("</TR>\n");
sb.append("<TR><TD>OR, restore an interview/questionnaire in progress: </TD>\n");
sb.append(" <TD>" + suspendedInterviews +
((developerMode) ? "<input type='text' name='RESTORE'>" : "") +
"</TD>\n");
sb.append(" <TD><input type='SUBMIT' name='directive' value='RESTORE'></TD>\n");
sb.append(showOptions());
sb.append("</TABLE>\n");
return sb.toString();
}
else if (directive.equals("START")) {
// load schedule
triceps = new Triceps(scheduleSrcDir, workingFilesDir, completedFilesDir);
ok = triceps.setSchedule(req.getParameter("schedule"),scheduleSrcDir);
if (!ok) {
try {
this.doGet(req,res);
}
catch (ServletException e) {
}
catch (IOException e) {
}
return sb.toString();
}
ok = ok && ((gotoMsg = triceps.gotoStarting()) == Triceps.OK); // don't proceed if prior error
// ask question
}
else if (directive.equals("RESTORE")) {
String restore;
restore = req.getParameter("RESTORE");
if (restore == null || restore.trim().equals("")) {
restore = req.getParameter("RestoreSuspended");
}
// load schedule
triceps = new Triceps(scheduleSrcDir, workingFilesDir, completedFilesDir);
ok = triceps.setSchedule(restore,workingFilesDir);
if (!ok) {
directive = null; // so that processDirective() will select new interview
return "<B>Unable to find or access schedule '" + restore + "'</B><HR>" +
processDirective();
}
ok = ok && ((gotoMsg = triceps.gotoStarting()) == Triceps.OK); // don't proceed if prior error
// ask question
}
else if (directive.equals("jump to:")) {
gotoMsg = triceps.gotoNode(req.getParameter("jump to:"));
ok = (gotoMsg == Triceps.OK);
// ask this question
}
else if (directive.equals("restart (clean)")) { // restart from scratch
ok = triceps.resetEvidence();
ok = ok && ((gotoMsg = triceps.gotoFirst()) == Triceps.OK); // don't proceed if prior error
// ask first question
}
else if (directive.equals("reload questions")) { // debugging option
ok = triceps.reloadSchedule();
if (ok) {
sb.append("<B>Schedule restored successfully</B><HR>\n");
}
// re-ask current question
}
else if (directive.equals("save to:")) {
String name = req.getParameter("save to:");
String file = workingFilesDir + name;
ok = triceps.toTSV(file);
if (ok) {
sb.append("<B>Interview saved successfully as " + Node.encodeHTML(name) + " (" + Node.encodeHTML(file) + ")</B><HR>\n");
}
}
else if (directive.equals("evaluate expr:")) {
String expr = req.getParameter("evaluate expr:");
if (expr != null) {
Datum datum = triceps.parser.parse(triceps.evidence, expr);
sb.append("<TABLE WIDTH='100%' CELLPADDING='2' CELLSPACING='1' BORDER=1>\n");
sb.append("<TR><TD>Equation</TD><TD><B>" + Node.encodeHTML(expr) + "</B></TD><TD>Type</TD><TD><B>" + Datum.TYPES[datum.type()] + "</B></TD></TR>\n");
sb.append("<TR><TD>String</TD><TD><B>" + Node.encodeHTML(datum.stringVal(true)) + "</B></TD><TD>boolean</TD><TD><B>" + datum.booleanVal() + "</B></TD></TR>\n");
sb.append("<TR><TD>double</TD><TD><B>" + datum.doubleVal() + "</B></TD><TD>long</TD><TD><B>" + datum.longVal() + "</B></TD></TR>\n");
sb.append("<TR><TD>date</TD><TD><B>" + datum.dateVal() + "</B></TD><TD>month</TD><TD><B>" + datum.monthVal() + "</B></TD></TR>\n");
sb.append("</TABLE>\n");
if (triceps.parser.hasErrors()) {
Vector v = triceps.parser.getErrors();
sb.append("<B>There were errors parsing that equation:</B><BR>");
for (int j=0;j<v.size();++j) {
if (j > 0)
sb.append("<BR>");
sb.append(Node.encodeHTML((String) v.elementAt(j)));
}
}
}
}
else if (directive.equals("show XML")) {
sb.append("<B>Use 'Show Source' to see data in Schedule as XML</B><BR>\n");
sb.append("<!--\n" + triceps.toXML() + "\n-->\n");
sb.append("<HR>\n");
}
else if (directive.equals("show Errors")) {
Vector pes = triceps.collectParseErrors();
if (pes.size() == 0) {
sb.append("<B>No errors were found</B><HR>");
}
else {
Vector errs;
for (int i=0;i<pes.size();++i) {
ParseError pe = (ParseError) pes.elementAt(i);
Node n = pe.getNode();
if (i == 0) {
sb.append("<FONT color='red'>The following errors were found in file <B>" + Node.encodeHTML(n.getSourceFile()) + "</B></FONT><BR>\n");
sb.append("<TABLE CELLPADDING='2' CELLSPACING='1' WIDTH='100%' border='1'>\n");
sb.append("<TR><TD>line#</TD><TD>name</TD><TD>Dependencies</TD><TD><B>Dependency Errors</B></TD><TD>Action Type</TD><TD>Action</TD><TD><B>Action Errors</B></TD><TD><B>Other Errors</B></TD></TR>\n");
}
sb.append("\n<TR><TD>" + n.getSourceLine() + "</TD><TD>" + Node.encodeHTML(n.getQuestionRef(),true) + "</TD>");
sb.append("\n<TD>" + Node.encodeHTML(pe.getDependencies(),true) + "</TD>\n<TD>");
errs = pe.getDependenciesErrors();
if (errs.size() == 0) {
sb.append(" ");
}
else {
for (int j=0;j<errs.size();++j) {
if (j > 0)
sb.append("<BR>");
sb.append("" + (j+1) + ") " + Node.encodeHTML((String) errs.elementAt(j),true));
}
}
sb.append("</TD>\n<TD>" + Node.ACTION_TYPES[n.getActionType()] + "</TD><TD>" + Node.encodeHTML(pe.getAction(),true) + "</TD><TD>");
errs = pe.getActionErrors();
if (errs.size() == 0) {
sb.append(" ");
}
else {
for (int j=0;j<errs.size();++j) {
if (j > 0)
sb.append("<BR>");
sb.append("" + (j+1) + ") " + Node.encodeHTML((String) errs.elementAt(j),true));
}
}
sb.append("</TD>\n<TD>");
errs = pe.getNodeErrors();
if (errs.size() == 0) {
sb.append(" ");
}
else {
for (int j=0;j<errs.size();++j) {
if (j > 0)
sb.append("<BR>");
sb.append("" + (j+1) + ") " + (String) errs.elementAt(j)); // XXX: don't Node.encodeHTML() these, since pre-processed within Node
}
}
sb.append("</TD></TR>");
}
sb.append("</TABLE><HR>\n");
}
}
else if (directive.equals("next")) {
// store current answer(s)
Enumeration questionNames = triceps.getQuestions();
while(questionNames.hasMoreElements()) {
Node q = (Node) questionNames.nextElement();
boolean status;
status = triceps.storeValue(q, req.getParameter(q.getName()),refuseToAnswerCurrent);
ok = status && ok;
}
// goto next
ok = ok && ((gotoMsg = triceps.gotoNext()) == Triceps.OK); // don't proceed if prior errors - e.g. unanswered questions
if (gotoMsg == Triceps.AT_END) {
// save the file, but still give the option to go back and change answers
boolean savedOK;
String name = Datum.format(triceps.getStartTime(),Datum.DATE,Datum.TIME_MASK);
String file = completedFilesDir + name;
sb.append("<B>Thank you, the interview is completed</B><BR>\n");
savedOK = triceps.toTSV(file);
ok = savedOK && ok;
if (savedOK) {
sb.append("<B>Interview saved successfully as " + Node.encodeHTML(name) + " (" + Node.encodeHTML(file) + ")</B><HR>\n");
}
}
// don't goto next if errors
// ask question
}
else if (directive.equals("previous")) {
// don't store current
// goto previous
gotoMsg = triceps.gotoPrevious();
ok = ok && (gotoMsg == Triceps.OK);
// ask question
}
if (!ok) {
Enumeration errs = triceps.getErrors();
if (errs.hasMoreElements()) {
while (errs.hasMoreElements()) {
sb.append("<B>" + Node.encodeHTML((String) errs.nextElement()) + "</B><BR>\n");
}
}
Enumeration nodes = triceps.getQuestions();
while (nodes.hasMoreElements()) {
Node n = (Node) nodes.nextElement();
if (n.hasRuntimeErrors()) {
sb.append("<B>Please answer the question(s) listed in <FONT color='red'>RED</FONT> before proceeding</B><BR>\n");
firstFocus = Node.encodeHTML(n.getName());
break;
}
}
sb.append("<HR>\n");
}
if (firstFocus == null) {
Enumeration nodes = triceps.getQuestions();
while (nodes.hasMoreElements()) {
Node n = (Node) nodes.nextElement();
if (n.focusable()) {
firstFocus = Node.encodeHTML(n.getName());
break;
}
}
}
sb.append(queryUser());
return sb.toString();
}
| private String processDirective() {
boolean ok = true;
int gotoMsg = Triceps.OK;
StringBuffer sb = new StringBuffer();
StringBuffer schedules = new StringBuffer();
StringBuffer suspendedInterviews = new StringBuffer();
// get the POSTed directive (start, back, next, help, suspend, etc.) - default is opening screen
if (directive == null || "select new interview".equals(directive)) {
/* read list of available schedules from file */
BufferedReader br = Triceps.getReader(scheduleList, scheduleSrcDir);
if (br == null) {
sb.append("<B>Unable to find '" + scheduleList + "'</B><HR>");
}
else {
try {
int count = 0;
int line=0;
String fileLine;
String src;
while ((fileLine = br.readLine()) != null) {
++line;
if (fileLine.startsWith("COMMENT"))
continue;
try {
StringTokenizer schedule = new StringTokenizer(fileLine,"\t");
String title = schedule.nextToken();
String fileLoc = schedule.nextToken();
if (title == null || fileLoc == null)
continue;
/* Test whether these files exist */
Reader target = Triceps.getReader(fileLoc,scheduleSrcDir);
if (target == null) {
sb.append("Unable to access file '" + fileLoc + "'");
}
else {
try { target.close(); } catch (Exception e) {}
++count;
schedules.append(" <option value='" + Node.encodeHTML(fileLoc) + "'>" + Node.encodeHTML(title) + "</option>\n");
}
}
catch (NullPointerException e) {
sb.append("Error tokenizing schedule list '" + scheduleList + "' on line " + line + ": " + e);
}
catch (NoSuchElementException e) {
sb.append("Error tokenizing schedule list '" + scheduleList + "' on line " + line + ": " + e);
}
catch (Throwable t) {}
}
}
catch(IOException e) {
sb.append("Error reading from " + scheduleList);
}
catch (Throwable t) {}
finally {
if (br != null) {
try { br.close(); } catch (Throwable t) { }
}
}
/* Now build the list of uncompleted interviews */
try {
File dir = new File(workingFilesDir);
if (dir.isDirectory() && dir.canRead()) {
String[] files = dir.list();
int count=0;
for (int i=0;i<files.length;++i) {
try {
File f = new File(files[i]);
if (!f.isDirectory()) {
if (count == 0) {
suspendedInterviews.append("<select name='RestoreSuspended'>\n <option value=''></option>\n");
}
suspendedInterviews.append(" <option value='" + files[i] + "'>" + files[i] + "</option>\n");
++count;
}
}
catch (Throwable t) {
System.out.println(t.getMessage());
}
}
if (count > 0) {
suspendedInterviews.append("</select><BR>");
}
else {
suspendedInterviews.append(" ");
}
}
else {
System.out.println("can't read from dir " + dir.toString());
}
}
catch(Throwable t) {
System.out.println(t.getMessage());
}
}
/* Now construct splash screen */
sb.append("<TABLE CELLPADDING='2' CELLSPACING='2' BORDER='1'>\n");
sb.append("<TR><TD>Please select an interview/questionnaire from the pull-down list: </TD>\n");
sb.append(" <TD><select name='schedule'>\n");
sb.append(schedules);
sb.append(" </select></TD>\n");
sb.append(" <TD><input type='SUBMIT' name='directive' value='START'></TD>\n");
sb.append("</TR>\n");
sb.append("<TR><TD>OR, restore an interview/questionnaire in progress: </TD>\n");
sb.append(" <TD>" + suspendedInterviews +
((developerMode) ? "<input type='text' name='RESTORE'>" : "") +
"</TD>\n");
sb.append(" <TD><input type='SUBMIT' name='directive' value='RESTORE'></TD>\n");
sb.append(showOptions());
sb.append("</TABLE>\n");
return sb.toString();
}
else if (directive.equals("START")) {
// load schedule
triceps = new Triceps(scheduleSrcDir, workingFilesDir, completedFilesDir);
ok = triceps.setSchedule(req.getParameter("schedule"),scheduleSrcDir);
if (!ok) {
try {
this.doGet(req,res);
}
catch (ServletException e) {
}
catch (IOException e) {
}
return sb.toString();
}
ok = ok && ((gotoMsg = triceps.gotoStarting()) == Triceps.OK); // don't proceed if prior error
// ask question
}
else if (directive.equals("RESTORE")) {
String restore;
restore = req.getParameter("RESTORE");
if (restore == null || restore.trim().equals("")) {
restore = req.getParameter("RestoreSuspended");
}
// load schedule
triceps = new Triceps(scheduleSrcDir, workingFilesDir, completedFilesDir);
ok = triceps.setSchedule(restore,workingFilesDir);
if (!ok) {
directive = null; // so that processDirective() will select new interview
return "<B>Unable to find or access schedule '" + restore + "'</B><HR>" +
processDirective();
}
ok = ok && ((gotoMsg = triceps.gotoStarting()) == Triceps.OK); // don't proceed if prior error
// ask question
}
else if (directive.equals("jump to:")) {
gotoMsg = triceps.gotoNode(req.getParameter("jump to:"));
ok = (gotoMsg == Triceps.OK);
// ask this question
}
else if (directive.equals("restart (clean)")) { // restart from scratch
ok = triceps.resetEvidence();
ok = ok && ((gotoMsg = triceps.gotoFirst()) == Triceps.OK); // don't proceed if prior error
// ask first question
}
else if (directive.equals("reload questions")) { // debugging option
ok = triceps.reloadSchedule();
if (ok) {
sb.append("<B>Schedule restored successfully</B><HR>\n");
}
// re-ask current question
}
else if (directive.equals("save to:")) {
String name = req.getParameter("save to:");
String file = workingFilesDir + name;
ok = triceps.toTSV(file);
if (ok) {
sb.append("<B>Interview saved successfully as " + Node.encodeHTML(name) + " (" + Node.encodeHTML(file) + ")</B><HR>\n");
}
}
else if (directive.equals("evaluate expr:")) {
String expr = req.getParameter("evaluate expr:");
if (expr != null) {
Datum datum = triceps.parser.parse(triceps.evidence, expr);
sb.append("<TABLE WIDTH='100%' CELLPADDING='2' CELLSPACING='1' BORDER=1>\n");
sb.append("<TR><TD>Equation</TD><TD><B>" + Node.encodeHTML(expr) + "</B></TD><TD>Type</TD><TD><B>" + Datum.TYPES[datum.type()] + "</B></TD></TR>\n");
sb.append("<TR><TD>String</TD><TD><B>" + Node.encodeHTML(datum.stringVal(true)) + "</B></TD><TD>boolean</TD><TD><B>" + datum.booleanVal() + "</B></TD></TR>\n");
sb.append("<TR><TD>double</TD><TD><B>" + datum.doubleVal() + "</B></TD><TD>long</TD><TD><B>" + datum.longVal() + "</B></TD></TR>\n");
sb.append("<TR><TD>date</TD><TD><B>" + datum.dateVal() + "</B></TD><TD>month</TD><TD><B>" + datum.monthVal() + "</B></TD></TR>\n");
sb.append("</TABLE>\n");
if (triceps.parser.hasErrors()) {
Vector v = triceps.parser.getErrors();
sb.append("<B>There were errors parsing that equation:</B><BR>");
for (int j=0;j<v.size();++j) {
if (j > 0)
sb.append("<BR>");
sb.append(Node.encodeHTML((String) v.elementAt(j)));
}
}
}
}
else if (directive.equals("show XML")) {
sb.append("<B>Use 'Show Source' to see data in Schedule as XML</B><BR>\n");
sb.append("<!--\n" + triceps.toXML() + "\n-->\n");
sb.append("<HR>\n");
}
else if (directive.equals("show Errors")) {
Vector pes = triceps.collectParseErrors();
if (pes.size() == 0) {
sb.append("<B>No errors were found</B><HR>");
}
else {
Vector errs;
for (int i=0;i<pes.size();++i) {
ParseError pe = (ParseError) pes.elementAt(i);
Node n = pe.getNode();
if (i == 0) {
sb.append("<FONT color='red'>The following errors were found in file <B>" + Node.encodeHTML(n.getSourceFile()) + "</B></FONT><BR>\n");
sb.append("<TABLE CELLPADDING='2' CELLSPACING='1' WIDTH='100%' border='1'>\n");
sb.append("<TR><TD>line#</TD><TD>name</TD><TD>Dependencies</TD><TD><B>Dependency Errors</B></TD><TD>Action Type</TD><TD>Action</TD><TD><B>Action Errors</B></TD><TD><B>Other Errors</B></TD></TR>\n");
}
sb.append("\n<TR><TD>" + n.getSourceLine() + "</TD><TD>" + Node.encodeHTML(n.getQuestionRef(),true) + "</TD>");
sb.append("\n<TD>" + Node.encodeHTML(pe.getDependencies(),true) + "</TD>\n<TD>");
errs = pe.getDependenciesErrors();
if (errs.size() == 0) {
sb.append(" ");
}
else {
for (int j=0;j<errs.size();++j) {
if (j > 0)
sb.append("<BR>");
sb.append("" + (j+1) + ") " + Node.encodeHTML((String) errs.elementAt(j),true));
}
}
sb.append("</TD>\n<TD>" + Node.ACTION_TYPES[n.getActionType()] + "</TD><TD>" + Node.encodeHTML(pe.getAction(),true) + "</TD><TD>");
errs = pe.getActionErrors();
if (errs.size() == 0) {
sb.append(" ");
}
else {
for (int j=0;j<errs.size();++j) {
if (j > 0)
sb.append("<BR>");
sb.append("" + (j+1) + ") " + Node.encodeHTML((String) errs.elementAt(j),true));
}
}
sb.append("</TD>\n<TD>");
errs = pe.getNodeErrors();
if (errs.size() == 0) {
sb.append(" ");
}
else {
for (int j=0;j<errs.size();++j) {
if (j > 0)
sb.append("<BR>");
sb.append("" + (j+1) + ") " + (String) errs.elementAt(j)); // XXX: don't Node.encodeHTML() these, since pre-processed within Node
}
}
sb.append("</TD></TR>");
}
sb.append("</TABLE><HR>\n");
}
}
else if (directive.equals("next")) {
// store current answer(s)
Enumeration questionNames = triceps.getQuestions();
while(questionNames.hasMoreElements()) {
Node q = (Node) questionNames.nextElement();
boolean status;
status = triceps.storeValue(q, req.getParameter(q.getName()),refuseToAnswerCurrent);
ok = status && ok;
}
// goto next
ok = ok && ((gotoMsg = triceps.gotoNext()) == Triceps.OK); // don't proceed if prior errors - e.g. unanswered questions
if (gotoMsg == Triceps.AT_END) {
// save the file, but still give the option to go back and change answers
boolean savedOK;
String name = Datum.format(triceps.getStartTime(),Datum.DATE,Datum.TIME_MASK);
String file = completedFilesDir + name;
sb.append("<B>Thank you, the interview is completed</B><BR>\n");
savedOK = triceps.toTSV(file);
ok = savedOK && ok;
if (savedOK) {
sb.append("<B>Interview saved successfully as " + Node.encodeHTML(name) + " (" + Node.encodeHTML(file) + ")</B><HR>\n");
}
}
// don't goto next if errors
// ask question
}
else if (directive.equals("previous")) {
// don't store current
// goto previous
gotoMsg = triceps.gotoPrevious();
ok = ok && (gotoMsg == Triceps.OK);
// ask question
}
// if (!ok)
{
/* should do this regardless of OK status? Might catch interesting parsing errors? */
int errCount = 0;
Enumeration errs = triceps.getErrors();
if (errs.hasMoreElements()) {
while (errs.hasMoreElements()) {
++errCount;
sb.append("<B>" + Node.encodeHTML((String) errs.nextElement()) + "</B><BR>\n");
}
}
Enumeration nodes = triceps.getQuestions();
while (nodes.hasMoreElements()) {
Node n = (Node) nodes.nextElement();
if (n.hasRuntimeErrors()) {
++errCount;
sb.append("<B>Please answer the question(s) listed in <FONT color='red'>RED</FONT> before proceeding</B><BR>\n");
firstFocus = Node.encodeHTML(n.getName());
break;
}
}
if (errCount > 0) {
sb.append("<HR>\n");
}
}
if (firstFocus == null) {
Enumeration nodes = triceps.getQuestions();
while (nodes.hasMoreElements()) {
Node n = (Node) nodes.nextElement();
if (n.focusable()) {
firstFocus = Node.encodeHTML(n.getName());
break;
}
}
}
sb.append(queryUser());
return sb.toString();
}
|
diff --git a/src/pe/edu/pucp/resource/BookReservationSerializer.java b/src/pe/edu/pucp/resource/BookReservationSerializer.java
index 1793d88..af7b975 100644
--- a/src/pe/edu/pucp/resource/BookReservationSerializer.java
+++ b/src/pe/edu/pucp/resource/BookReservationSerializer.java
@@ -1,110 +1,111 @@
package pe.edu.pucp.resource;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.logging.Level;
import java.util.logging.Logger;
import org.restlet.data.Form;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import pe.edu.pucp.dao.LibraryServiceDAO;
import pe.edu.pucp.model.BaseSerializer;
import pe.edu.pucp.model.Book;
import pe.edu.pucp.model.BookReservation;
import pe.edu.pucp.model.Student;
import com.googlecode.objectify.Key;
/**
* @author cgavidia
*
*/
public class BookReservationSerializer extends BaseSerializer<BookReservation> {
public static final Logger LOG = Logger
.getLogger(BookReservationSerializer.class.getName());
public static final String CODE_ELEMENT = "codigo";
private static final String LIST_ROOT_ELEMENT = "prestamos";
private static final String STUDENT_ID_ELEMENT = "codigoEstudiante";
private static final String BOOK_ID_ELEMENT = "codigoLibro";
private static final String DATE_PATTERN = "dd-MM-yyyy hh:mm";
private static final String DATE_ELEMENT = "fechaHora";
private static final String RESERVATION_ELEMENT = "prestamo";
private BookReservation entity;
private SimpleDateFormat formatter = new SimpleDateFormat(DATE_PATTERN);
@Override
public BookReservation getEntity() {
return entity;
}
@Override
public String getRootListElement() {
return LIST_ROOT_ELEMENT;
}
@Override
public void intializeProperties(Form form) {
if (form.getFirstValue(CODE_ELEMENT) != null) {
entity.setId(Long.parseLong(form.getFirstValue(CODE_ELEMENT)));
}
entity.setStudent(new Key<Student>(Student.class, Long.parseLong(form
.getFirstValue(STUDENT_ID_ELEMENT))));
entity.setBook(new Key<Book>(Book.class, Long.parseLong(form
.getFirstValue(BOOK_ID_ELEMENT))));
try {
entity.setDate(formatter.parse(form.getFirstValue(DATE_ELEMENT)));
} catch (ParseException e) {
LOG.log(Level.SEVERE, "Error parsing", e);
entity.setDate(null);
}
}
@Override
public Element toXml(Document document) {
Element reservationElement = document
.createElement(RESERVATION_ELEMENT);
Element codeElement = document.createElement(CODE_ELEMENT);
codeElement.appendChild(document.createTextNode(entity.getId()
.toString()));
reservationElement.appendChild(codeElement);
Student student = new LibraryServiceDAO<Student>(Student.class)
.get(entity.getStudent());
Book book = new LibraryServiceDAO<Book>(Book.class).get(entity
.getBook());
reservationElement.appendChild(new StudentSerializer(student)
.toXml(document));
reservationElement
.appendChild(new BookSerializer(book).toXml(document));
Element dateElement = document.createElement(DATE_ELEMENT);
- dateElement.appendChild(document.createTextNode(formatter.format(entity
- .getDate())));
+ dateElement.appendChild(document
+ .createTextNode(entity.getDate() != null ? formatter
+ .format(entity.getDate()) : ""));
reservationElement.appendChild(dateElement);
return reservationElement;
}
@Override
public void setEntity(BookReservation entity) {
this.entity = entity;
}
@Override
public void setId(Long id) {
entity.setId(id);
}
@Override
public String getCodeElement() {
return CODE_ELEMENT;
}
}
| true | true | public Element toXml(Document document) {
Element reservationElement = document
.createElement(RESERVATION_ELEMENT);
Element codeElement = document.createElement(CODE_ELEMENT);
codeElement.appendChild(document.createTextNode(entity.getId()
.toString()));
reservationElement.appendChild(codeElement);
Student student = new LibraryServiceDAO<Student>(Student.class)
.get(entity.getStudent());
Book book = new LibraryServiceDAO<Book>(Book.class).get(entity
.getBook());
reservationElement.appendChild(new StudentSerializer(student)
.toXml(document));
reservationElement
.appendChild(new BookSerializer(book).toXml(document));
Element dateElement = document.createElement(DATE_ELEMENT);
dateElement.appendChild(document.createTextNode(formatter.format(entity
.getDate())));
reservationElement.appendChild(dateElement);
return reservationElement;
}
| public Element toXml(Document document) {
Element reservationElement = document
.createElement(RESERVATION_ELEMENT);
Element codeElement = document.createElement(CODE_ELEMENT);
codeElement.appendChild(document.createTextNode(entity.getId()
.toString()));
reservationElement.appendChild(codeElement);
Student student = new LibraryServiceDAO<Student>(Student.class)
.get(entity.getStudent());
Book book = new LibraryServiceDAO<Book>(Book.class).get(entity
.getBook());
reservationElement.appendChild(new StudentSerializer(student)
.toXml(document));
reservationElement
.appendChild(new BookSerializer(book).toXml(document));
Element dateElement = document.createElement(DATE_ELEMENT);
dateElement.appendChild(document
.createTextNode(entity.getDate() != null ? formatter
.format(entity.getDate()) : ""));
reservationElement.appendChild(dateElement);
return reservationElement;
}
|
diff --git a/test/src/org/apache/ace/configurator/ConfiguratorTest.java b/test/src/org/apache/ace/configurator/ConfiguratorTest.java
index 3b36f6c3..5398b464 100644
--- a/test/src/org/apache/ace/configurator/ConfiguratorTest.java
+++ b/test/src/org/apache/ace/configurator/ConfiguratorTest.java
@@ -1,348 +1,356 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.ace.configurator;
import static org.apache.ace.test.utils.TestUtils.UNIT;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.util.Dictionary;
import java.util.Properties;
import org.apache.ace.test.utils.FileUtils;
import org.apache.ace.test.utils.TestUtils;
import org.osgi.framework.BundleContext;
import org.osgi.service.cm.ConfigurationAdmin;
import org.osgi.service.log.LogService;
import org.testng.annotations.AfterMethod;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
public class ConfiguratorTest {
private Configurator m_configurator;
private File m_configDir;
private ConfigurationAdmin m_configAdmin;
@BeforeMethod(alwaysRun = true)
protected void setUp() throws Exception {
setUp(false);
}
/**
* Sets up the environment for testing.
* @param reconfig Indicates whether or not the configurator should use reconfiguration.
*/
protected void setUp(boolean reconfig) throws Exception {
m_configAdmin = new MockConfigAdmin();
m_configDir = FileUtils.createTempFile(null);
m_configDir.mkdir();
m_configurator = new Configurator(m_configDir, 400, reconfig);
TestUtils.configureObject(m_configurator, ConfigurationAdmin.class, m_configAdmin);
TestUtils.configureObject(m_configurator, LogService.class);
TestUtils.configureObject(m_configurator, BundleContext.class, TestUtils.createMockObjectAdapter(BundleContext.class, new Object() {
@SuppressWarnings("unused")
public String getProperty(String key) {
return "contextProp";
}
}));
m_configurator.start();
}
/**
* save the properties into a configuration file the configurator can read.
* The file is first created and then moved to make sure the configuration doesn't read an empty file
*/
private void saveConfiguration(String servicePid, Properties configuration) {
saveConfiguration(servicePid, null, configuration);
}
/**
* save the properties into a configuration file stored in a directory reflecting the factory pid
*/
private void saveConfiguration(String servicePid, String factoryPid, Properties configuration) {
OutputStream fileOutputStream = null;
File outFile = null;
try {
outFile = FileUtils.createTempFile(null);
fileOutputStream = new FileOutputStream(outFile);
configuration.store(fileOutputStream, null);
} catch (IOException ioe) {
// the test will fail, ignore this.
} finally {
if (fileOutputStream != null) {
try {
fileOutputStream.close();
}
catch (IOException e) {
// nothing we can do
}
}
}
if (outFile != null) {
if (factoryPid == null) {
- outFile.renameTo(new File(m_configDir, servicePid+".cfg"));
+ File dest = new File(m_configDir, servicePid + ".cfg");
+ if (dest.exists()) {
+ dest.delete();
+ }
+ outFile.renameTo(dest);
}
else {
File file = new File(m_configDir, factoryPid);
file.mkdirs();
- outFile.renameTo(new File(file, servicePid+".cfg"));
+ File dest = new File(file, servicePid + ".cfg");
+ if (dest.exists()) {
+ dest.delete();
+ }
+ outFile.renameTo(dest);
}
}
}
// remove a created configuration file
private void removeConfiguration(String servicePid) {
removeConfiguration(servicePid, null);
}
private void removeConfiguration(String servicePid, String factoryPid) {
if (factoryPid != null) {
new File(m_configDir, factoryPid + File.separator + servicePid + ".cfg").delete();
} else {
new File(m_configDir, servicePid + ".cfg").delete();
}
}
// set some standard properties for testing
private Properties createProperties() {
Properties props = new Properties();
props.put("test", "value1");
props.put("test2", "value2");
return props;
}
// add a configuration
@SuppressWarnings("unchecked")
@Test(groups = { UNIT })
public void testAddConfiguration() {
Properties initialConfiguration = createProperties();
saveConfiguration("test-add", initialConfiguration);
Dictionary configuration = getAndWaitForConfiguration(initialConfiguration);
assert configuration != null : "No configuration received from configurator";
assert configuration.equals(createProperties()) : "Configuration content is unexpected";
}
@SuppressWarnings("unchecked")
@Test(groups = { UNIT })
public void testAddFactoryConfiguration() {
Properties props = createProperties();
saveConfiguration("test-add", "testFactory", props);
Dictionary configuration = getAndWaitForConfiguration(props);
assert configuration != null : "No configuration received from configurator";
assert "testFactory_test-add".equals(configuration.remove("factory.instance.pid")) : "Incorrect factory instance pid was added to the configuration";
assert configuration.equals(createProperties()) : "Configuration content is unexpected";
}
// remove a configuration
@Test(groups = { UNIT })
public void testRemoveFactoryConfiguration() {
Properties props = createProperties();
saveConfiguration("test-remove", "testFactory", props);
getAndWaitForConfiguration(props);
removeConfiguration("test-remove", "testFactory");
// after some processing time, we should get a message that the configuration is now removed.
long startTimeMillis = System.currentTimeMillis();
boolean isDeleted = false;
try {
while (!isDeleted && (System.currentTimeMillis() < startTimeMillis + 2000)) {
isDeleted = ((MockConfiguration) m_configAdmin.getConfiguration("")).isDeleted();
if (!isDeleted) {
Thread.sleep(100);
}
}
} catch (InterruptedException ie) {
// not much we can do
}
catch (IOException e) {
// cannot come from our mock config admin
}
assert isDeleted : "The configuration is not removed as expected";
}
@SuppressWarnings("unchecked")
@Test(groups = { UNIT })
public void testPropertySubstitution( ) {
Properties initialConfiguration = createProperties();
initialConfiguration.put("var", "value");
initialConfiguration.put("subst", "${var}");
saveConfiguration("test-subst", initialConfiguration);
Dictionary configuration = getAndWaitForConfiguration(initialConfiguration);
assert configuration != null : "No configuration received from configurator";
assert configuration.get("subst").equals(configuration.get("var")) : "Substitution failed";
}
@SuppressWarnings("unchecked")
@Test(groups = { UNIT })
public void testPropertySubstitutionFromContext() {
Properties initialConfiguration = createProperties();
initialConfiguration.put("subst", "${var}");
saveConfiguration("test-subst", initialConfiguration);
Dictionary configuration = getAndWaitForConfiguration(initialConfiguration);
assert configuration != null : "No configuration received from configurator";
assert configuration.get("subst") != null : "Substitution failed";
}
// update a configuration, only adding a key (this is allowed in all cases)
@SuppressWarnings("unchecked")
@Test(groups = { UNIT })
public void testChangeConfigurationUsingNewKey() {
Properties initialConfiguration = createProperties();
saveConfiguration("test-change", initialConfiguration);
Dictionary configuration = getAndWaitForConfiguration(initialConfiguration);
assert configuration != null : "No configuration received from configurator";
assert configuration.equals(initialConfiguration) : "Configuration content not expected. Was expecting " + initialConfiguration.size() + " but got " + configuration.size();
initialConfiguration.put("anotherKey","anotherValue");
saveConfiguration("test-change", initialConfiguration);
// now the configuration should be updated
configuration = getAndWaitForConfiguration(initialConfiguration);
assert configuration != null : "No configuration received from configurator";
assert configuration.equals(initialConfiguration) : "Configuration content not expected. Was expecting " + initialConfiguration.size() + " but got " + configuration.size();
}
// update a configuration, changing an already existing key, not using reconfiguration
@SuppressWarnings("unchecked")
@Test(groups = { UNIT })
public void testChangeConfigurationUsingSameKeyNoReconfigure() {
Properties configurationValues = createProperties();
Properties initialConfigurationValues = new Properties();
initialConfigurationValues.putAll(configurationValues);
saveConfiguration("test-change", configurationValues);
Dictionary configuration = getAndWaitForConfiguration(configurationValues);
assert configuration != null : "No configuration received from configurator";
assert configuration.equals(configurationValues) : "Configuration content not expected. Was expecting " + configurationValues.size() + " but got " + configuration.size();
configurationValues.put("test","value42");
saveConfiguration("test-change", configurationValues);
// The update should have been ignored, and the old values should still be present.
configuration = getAndWaitForConfiguration(configurationValues);
assert configuration != null : "No configuration received from configurator";
assert configuration.equals(initialConfigurationValues) : "Configuration content not expected. Was expecting " + configurationValues.size() + " but got " + configuration.size();
}
// update a configuration, changing an already existing key, using reconfiguration
@SuppressWarnings("unchecked")
@Test(groups = { UNIT })
public void testChangeConfigurationUsingSameKeyWithReconfigure() throws Exception {
setUp(true); // Instruct the configurator to reconfigure
Properties configurationValues = createProperties();
saveConfiguration("test-change", configurationValues);
Dictionary configuration = getAndWaitForConfiguration(configurationValues);
assert configuration != null : "No configuration received from configurator";
assert configuration.equals(configurationValues) : "Configuration content not expected. Was expecting " + configurationValues.size() + " but got " + configuration.size();
configurationValues.put("test","value42");
saveConfiguration("test-change", configurationValues);
// now the configuration should be updated
configuration = getAndWaitForConfiguration(configurationValues);
assert configuration != null : "No configuration received from configurator";
assert configuration.equals(configurationValues) : "Configuration content not expected. Was expecting " + configurationValues.size() + " but got " + configuration.size();
}
// remove a configuration
@SuppressWarnings("unchecked")
@Test(groups = { UNIT })
public void testRemoveConfiguration() {
Properties initialConfiguration = createProperties();
saveConfiguration("test-remove", initialConfiguration);
Dictionary configuration = getAndWaitForConfiguration(initialConfiguration);
assert configuration != null : "No configuration received from configurator";
assert configuration.equals(createProperties()) : "Configuration content is unexpected";
// ok, the configuration is done.
// now try to remove it.
removeConfiguration("test-remove");
// after some processing time, we should get a message that the configuration is now removed.
long startTimeMillis = System.currentTimeMillis();
boolean isDeleted = false;
try {
while (!isDeleted && (System.currentTimeMillis() < startTimeMillis + 2000)) {
isDeleted = ((MockConfiguration) m_configAdmin.getConfiguration("")).isDeleted();
if (!isDeleted) {
Thread.sleep(100);
}
}
} catch (InterruptedException ie) {
// not much we can do
}
catch (IOException e) {
// cannot come from our mock config admin
}
assert isDeleted : "The configuration is not removed as expected";
}
/**
* Get the configuration and if it not available yet wait for it.
* If there is still no configuration after the wait time,
* null is returned.
*/
@SuppressWarnings("unchecked")
public Dictionary getAndWaitForConfiguration(Dictionary expectedConfiguration) {
long startTimeMillis = System.currentTimeMillis();
// make sure we iterate at least once
Dictionary configuration = null;
try {
boolean success = false;
while (!success && (System.currentTimeMillis() < startTimeMillis + 2000)) {
configuration = m_configAdmin.getConfiguration("").getProperties();
if (configuration != null) {
synchronized(configuration) {
if (expectedConfiguration.equals(configuration)) {
success = true;
}
}
}
if (!success) {
Thread.sleep(100);
}
}
} catch (InterruptedException ie) {
// not much we can do
}
catch (IOException e) {
// cannot come from our mock config admin
}
return configuration;
}
@AfterMethod(alwaysRun = true)
public void tearDown() throws Exception {
m_configurator.stop();
FileUtils.removeDirectoryWithContent(m_configDir);
}
}
| false | true | private void saveConfiguration(String servicePid, String factoryPid, Properties configuration) {
OutputStream fileOutputStream = null;
File outFile = null;
try {
outFile = FileUtils.createTempFile(null);
fileOutputStream = new FileOutputStream(outFile);
configuration.store(fileOutputStream, null);
} catch (IOException ioe) {
// the test will fail, ignore this.
} finally {
if (fileOutputStream != null) {
try {
fileOutputStream.close();
}
catch (IOException e) {
// nothing we can do
}
}
}
if (outFile != null) {
if (factoryPid == null) {
outFile.renameTo(new File(m_configDir, servicePid+".cfg"));
}
else {
File file = new File(m_configDir, factoryPid);
file.mkdirs();
outFile.renameTo(new File(file, servicePid+".cfg"));
}
}
}
| private void saveConfiguration(String servicePid, String factoryPid, Properties configuration) {
OutputStream fileOutputStream = null;
File outFile = null;
try {
outFile = FileUtils.createTempFile(null);
fileOutputStream = new FileOutputStream(outFile);
configuration.store(fileOutputStream, null);
} catch (IOException ioe) {
// the test will fail, ignore this.
} finally {
if (fileOutputStream != null) {
try {
fileOutputStream.close();
}
catch (IOException e) {
// nothing we can do
}
}
}
if (outFile != null) {
if (factoryPid == null) {
File dest = new File(m_configDir, servicePid + ".cfg");
if (dest.exists()) {
dest.delete();
}
outFile.renameTo(dest);
}
else {
File file = new File(m_configDir, factoryPid);
file.mkdirs();
File dest = new File(file, servicePid + ".cfg");
if (dest.exists()) {
dest.delete();
}
outFile.renameTo(dest);
}
}
}
|
diff --git a/src/org/tomahawk/libtomahawk/audio/AlbumArtSwipeAdapter.java b/src/org/tomahawk/libtomahawk/audio/AlbumArtSwipeAdapter.java
index b36f3358..bfb96bff 100644
--- a/src/org/tomahawk/libtomahawk/audio/AlbumArtSwipeAdapter.java
+++ b/src/org/tomahawk/libtomahawk/audio/AlbumArtSwipeAdapter.java
@@ -1,257 +1,257 @@
/* == This file is part of Tomahawk Player - <http://tomahawk-player.org> ===
*
* Copyright 2012, Enno Gottschalk <[email protected]>
*
* Tomahawk is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Tomahawk is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with Tomahawk. If not, see <http://www.gnu.org/licenses/>.
*/
package org.tomahawk.libtomahawk.audio;
import org.tomahawk.libtomahawk.playlist.Playlist;
import org.tomahawk.tomahawk_android.R;
import android.content.Context;
import android.graphics.Bitmap;
import android.os.Parcelable;
import android.support.v4.view.PagerAdapter;
import android.support.v4.view.ViewPager;
import android.view.View;
import android.widget.ImageView;
/**
* @author Enno Gottschalk <[email protected]>
*
*/
public class AlbumArtSwipeAdapter extends PagerAdapter implements ViewPager.OnPageChangeListener {
private static final int FAKE_INFINITY_COUNT = 20000;
private Context mContext;
private int mFakeInfinityOffset;
private boolean mByUser;
private boolean mSwiped;
private ViewPager mViewPager;
private PlaybackService mPlaybackService;
private Playlist mPlaylist;
private int mCurrentViewPage = 0;
/**
* Constructs a new AlbumArtSwipeAdapter with the given list of AlbumArt
* images
*/
public AlbumArtSwipeAdapter(Context context, ViewPager viewPager) {
this.mContext = context;
this.mByUser = true;
this.mSwiped = false;
this.mViewPager = viewPager;
this.mViewPager.setAdapter(this);
this.mViewPager.setOnPageChangeListener(this);
}
/*
* (non-Javadoc)
*
* @see
* android.support.v4.view.PagerAdapter#instantiateItem(android.view.View,
* int)
*/
@Override
public Object instantiateItem(View collection, int position) {
ImageView albumArt = new ImageView(mContext);
if (mPlaylist != null) {
Bitmap albumArtBitmap = null;
if (mPlaylist.isRepeating()
&& mPlaylist.peekTrackAtPos((position) % mPlaylist.getCount()).getAlbum() != null)
albumArtBitmap = mPlaylist.peekTrackAtPos((position) % mPlaylist.getCount()).getAlbum().getAlbumArt();
- else if (mPlaylist.peekTrackAtPos(position).getAlbum() != null)
+ else if (!mPlaylist.isRepeating() && mPlaylist.peekTrackAtPos(position).getAlbum() != null)
albumArtBitmap = mPlaylist.peekTrackAtPos(position).getAlbum().getAlbumArt();
if (albumArtBitmap != null)
albumArt.setImageBitmap(albumArtBitmap);
else
albumArt.setImageResource(R.drawable.no_album_art_placeholder);
} else
albumArt.setImageResource(R.drawable.no_album_art_placeholder);
((ViewPager) collection).addView(albumArt);
return albumArt;
}
/*
* (non-Javadoc)
*
* @see android.support.v4.view.PagerAdapter#getCount()
*/
@Override
public int getCount() {
if (mPlaylist == null)
return 1;
if (mPlaylist.isRepeating())
return FAKE_INFINITY_COUNT;
return mPlaylist.getCount();
}
/**
* @return the offset by which the position should be shifted, when playlist is repeating
*/
public int getFakeInfinityOffset() {
return mFakeInfinityOffset;
}
/*
* (non-Javadoc)
*
* @see android.support.v4.view.PagerAdapter#destroyItem(android.view.View,
* int, java.lang.Object)
*/
@Override
public void destroyItem(View arg0, int arg1, Object arg2) {
((ViewPager) arg0).removeView((View) arg2);
}
/*
* (non-Javadoc)
*
* @see
* android.support.v4.view.PagerAdapter#isViewFromObject(android.view.View,
* java.lang.Object)
*/
@Override
public boolean isViewFromObject(View arg0, Object arg1) {
return arg0 == ((View) arg1);
}
/*
* (non-Javadoc)
*
* @see android.support.v4.view.PagerAdapter#saveState()
*/
@Override
public Parcelable saveState() {
return null;
}
/*
* (non-Javadoc)
*
* @see
* android.support.v4.view.PagerAdapter#getItemPosition(java.lang.Object)
*/
@Override
public int getItemPosition(Object object) {
return POSITION_NONE;
}
/** @param position to set the current item to
/** @param smoothScroll boolean to determine wether or not to show a scrolling animation */
public void setCurrentItem(int position, boolean smoothScroll) {
if (position != mCurrentViewPage) {
if (mPlaylist.isRepeating()) {
if (position == (mCurrentViewPage % mPlaylist.getCount()) + 1
|| ((mCurrentViewPage % mPlaylist.getCount()) == mPlaylist.getCount() - 1 && position == 0))
setCurrentToNextItem(smoothScroll);
else if (position == (mCurrentViewPage % mPlaylist.getCount()) - 1
|| ((mCurrentViewPage % mPlaylist.getCount()) == 0 && position == mPlaylist.getCount() - 1))
setCurrentToPreviousItem(smoothScroll);
else {
mViewPager.setCurrentItem(position, false);
}
} else {
mViewPager.setCurrentItem(position, smoothScroll);
}
mCurrentViewPage = mViewPager.getCurrentItem();
}
}
/** @param smoothScroll boolean to determine wether or not to show a scrolling animation */
public void setCurrentToNextItem(boolean smoothScroll) {
mViewPager.setCurrentItem(mCurrentViewPage + 1, smoothScroll);
}
/** @param smoothScroll boolean to determine wether or not to show a scrolling animation */
public void setCurrentToPreviousItem(boolean smoothScroll) {
mViewPager.setCurrentItem(mCurrentViewPage - 1, smoothScroll);
}
/**
* update the playlist of the AlbumArtSwipeAdapter to the given Playlist
*/
public void updatePlaylist() {
if (mPlaybackService != null)
mPlaylist = mPlaybackService.getCurrentPlaylist();
if (mPlaylist != null) {
mFakeInfinityOffset = mPlaylist.getCount() * ((FAKE_INFINITY_COUNT / 2) / mPlaylist.getCount());
setByUser(false);
if (mPlaylist.isRepeating()) {
setCurrentItem(mPlaylist.getPosition() + getFakeInfinityOffset(), false);
} else {
setCurrentItem(mPlaylist.getPosition(), false);
}
notifyDataSetChanged();
setByUser(true);
}
}
public boolean isByUser() {
return mByUser;
}
public void setByUser(boolean byUser) {
this.mByUser = byUser;
}
public boolean isSwiped() {
return mSwiped;
}
public void setSwiped(boolean isSwiped) {
this.mSwiped = isSwiped;
}
public boolean isPlaylistNull() {
return mPlaylist == null;
}
public void setPlaybackService(PlaybackService mPlaybackService) {
this.mPlaybackService = mPlaybackService;
updatePlaylist();
}
/* (non-Javadoc)
* @see android.support.v4.view.ViewPager.OnPageChangeListener#onPageSelected(int)
*/
@Override
public void onPageSelected(int arg0) {
if (mPlaybackService != null && isByUser()) {
setSwiped(true);
if (arg0 == mCurrentViewPage - 1)
mPlaybackService.previous();
else if (arg0 == mCurrentViewPage + 1)
mPlaybackService.next();
}
mCurrentViewPage = arg0;
}
/* (non-Javadoc)
* @see android.support.v4.view.ViewPager.OnPageChangeListener#onPageScrolled(int, float, int)
*/
@Override
public void onPageScrolled(int arg0, float arg1, int arg2) {
}
/* (non-Javadoc)
* @see android.support.v4.view.ViewPager.OnPageChangeListener#onPageScrollStateChanged(int)
*/
@Override
public void onPageScrollStateChanged(int arg0) {
}
}
| true | true | public Object instantiateItem(View collection, int position) {
ImageView albumArt = new ImageView(mContext);
if (mPlaylist != null) {
Bitmap albumArtBitmap = null;
if (mPlaylist.isRepeating()
&& mPlaylist.peekTrackAtPos((position) % mPlaylist.getCount()).getAlbum() != null)
albumArtBitmap = mPlaylist.peekTrackAtPos((position) % mPlaylist.getCount()).getAlbum().getAlbumArt();
else if (mPlaylist.peekTrackAtPos(position).getAlbum() != null)
albumArtBitmap = mPlaylist.peekTrackAtPos(position).getAlbum().getAlbumArt();
if (albumArtBitmap != null)
albumArt.setImageBitmap(albumArtBitmap);
else
albumArt.setImageResource(R.drawable.no_album_art_placeholder);
} else
albumArt.setImageResource(R.drawable.no_album_art_placeholder);
((ViewPager) collection).addView(albumArt);
return albumArt;
}
| public Object instantiateItem(View collection, int position) {
ImageView albumArt = new ImageView(mContext);
if (mPlaylist != null) {
Bitmap albumArtBitmap = null;
if (mPlaylist.isRepeating()
&& mPlaylist.peekTrackAtPos((position) % mPlaylist.getCount()).getAlbum() != null)
albumArtBitmap = mPlaylist.peekTrackAtPos((position) % mPlaylist.getCount()).getAlbum().getAlbumArt();
else if (!mPlaylist.isRepeating() && mPlaylist.peekTrackAtPos(position).getAlbum() != null)
albumArtBitmap = mPlaylist.peekTrackAtPos(position).getAlbum().getAlbumArt();
if (albumArtBitmap != null)
albumArt.setImageBitmap(albumArtBitmap);
else
albumArt.setImageResource(R.drawable.no_album_art_placeholder);
} else
albumArt.setImageResource(R.drawable.no_album_art_placeholder);
((ViewPager) collection).addView(albumArt);
return albumArt;
}
|
diff --git a/ghana-national-xforms/src/main/java/org/motechproject/ghana/national/handlers/PNCBabyFormHandler.java b/ghana-national-xforms/src/main/java/org/motechproject/ghana/national/handlers/PNCBabyFormHandler.java
index 1371e006..f25a092f 100644
--- a/ghana-national-xforms/src/main/java/org/motechproject/ghana/national/handlers/PNCBabyFormHandler.java
+++ b/ghana-national-xforms/src/main/java/org/motechproject/ghana/national/handlers/PNCBabyFormHandler.java
@@ -1,79 +1,79 @@
package org.motechproject.ghana.national.handlers;
import org.motechproject.ghana.national.bean.PNCBabyForm;
import org.motechproject.ghana.national.domain.Constants;
import org.motechproject.ghana.national.domain.Facility;
import org.motechproject.ghana.national.domain.Patient;
import org.motechproject.ghana.national.service.ChildVisitService;
import org.motechproject.ghana.national.service.FacilityService;
import org.motechproject.ghana.national.service.PatientService;
import org.motechproject.ghana.national.service.StaffService;
import org.motechproject.ghana.national.service.request.PNCBabyRequest;
import org.motechproject.mobileforms.api.callbacks.FormPublishHandler;
import org.motechproject.model.MotechEvent;
import org.motechproject.mrs.model.MRSUser;
import org.motechproject.openmrs.advice.ApiSession;
import org.motechproject.openmrs.advice.LoginAsAdmin;
import org.motechproject.server.event.annotations.MotechListener;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
@Component
public class PNCBabyFormHandler implements FormPublishHandler {
private final Logger log = LoggerFactory.getLogger(this.getClass());
@Autowired
ChildVisitService childVisitService;
@Autowired
FacilityService facilityService;
@Autowired
StaffService staffService;
@Autowired
PatientService patientService;
@Override
@MotechListener(subjects = "form.validation.successful.NurseDataEntry.pncBabyRequest")
@LoginAsAdmin
@ApiSession
public void handleFormEvent(MotechEvent motechEvent) {
PNCBabyForm pncBabyForm = (PNCBabyForm) motechEvent.getParameters().get(Constants.FORM_BEAN);
try {
childVisitService.save(createRequest(pncBabyForm));
} catch (Exception e) {
log.error("Exception occured in saving Delivery Notification details for: " + pncBabyForm.getMotechId(), e);
}
}
private PNCBabyRequest createRequest(PNCBabyForm pncBabyForm) {
Facility facility = facilityService.getFacilityByMotechId(pncBabyForm.getFacilityId());
MRSUser staff = staffService.getUserByEmailIdOrMotechId(pncBabyForm.getStaffId());
Patient patient = patientService.getPatientByMotechId(pncBabyForm.getMotechId());
return new PNCBabyRequest()
.patient(patient)
.facility(facility)
.staff(staff)
- .visit(Integer.parseInt(pncBabyForm.getVisitNumber()))
+ .visit(pncBabyForm.getVisitNumber())
.weight(pncBabyForm.getWeight())
.temperature(pncBabyForm.getTemperature())
.location(pncBabyForm.getLocation())
.house(pncBabyForm.getHouse())
.community(pncBabyForm.getCommunity())
.referred(pncBabyForm.getReferred())
.maleInvolved(pncBabyForm.getMaleInvolved())
.date(pncBabyForm.getDate())
.respiration(pncBabyForm.getRespiration())
.cordConditionNormal(pncBabyForm.getCordConditionNormal())
.babyConditionGood(pncBabyForm.getBabyConditionGood())
.bcg(pncBabyForm.getBcg())
.opv0(pncBabyForm.getOpv0())
.comments(pncBabyForm.getComments());
}
}
| true | true | private PNCBabyRequest createRequest(PNCBabyForm pncBabyForm) {
Facility facility = facilityService.getFacilityByMotechId(pncBabyForm.getFacilityId());
MRSUser staff = staffService.getUserByEmailIdOrMotechId(pncBabyForm.getStaffId());
Patient patient = patientService.getPatientByMotechId(pncBabyForm.getMotechId());
return new PNCBabyRequest()
.patient(patient)
.facility(facility)
.staff(staff)
.visit(Integer.parseInt(pncBabyForm.getVisitNumber()))
.weight(pncBabyForm.getWeight())
.temperature(pncBabyForm.getTemperature())
.location(pncBabyForm.getLocation())
.house(pncBabyForm.getHouse())
.community(pncBabyForm.getCommunity())
.referred(pncBabyForm.getReferred())
.maleInvolved(pncBabyForm.getMaleInvolved())
.date(pncBabyForm.getDate())
.respiration(pncBabyForm.getRespiration())
.cordConditionNormal(pncBabyForm.getCordConditionNormal())
.babyConditionGood(pncBabyForm.getBabyConditionGood())
.bcg(pncBabyForm.getBcg())
.opv0(pncBabyForm.getOpv0())
.comments(pncBabyForm.getComments());
}
| private PNCBabyRequest createRequest(PNCBabyForm pncBabyForm) {
Facility facility = facilityService.getFacilityByMotechId(pncBabyForm.getFacilityId());
MRSUser staff = staffService.getUserByEmailIdOrMotechId(pncBabyForm.getStaffId());
Patient patient = patientService.getPatientByMotechId(pncBabyForm.getMotechId());
return new PNCBabyRequest()
.patient(patient)
.facility(facility)
.staff(staff)
.visit(pncBabyForm.getVisitNumber())
.weight(pncBabyForm.getWeight())
.temperature(pncBabyForm.getTemperature())
.location(pncBabyForm.getLocation())
.house(pncBabyForm.getHouse())
.community(pncBabyForm.getCommunity())
.referred(pncBabyForm.getReferred())
.maleInvolved(pncBabyForm.getMaleInvolved())
.date(pncBabyForm.getDate())
.respiration(pncBabyForm.getRespiration())
.cordConditionNormal(pncBabyForm.getCordConditionNormal())
.babyConditionGood(pncBabyForm.getBabyConditionGood())
.bcg(pncBabyForm.getBcg())
.opv0(pncBabyForm.getOpv0())
.comments(pncBabyForm.getComments());
}
|
diff --git a/src/frontend/Parser.java b/src/frontend/Parser.java
index 78e519d..d108298 100644
--- a/src/frontend/Parser.java
+++ b/src/frontend/Parser.java
@@ -1,204 +1,211 @@
package frontend;
import intermediate.IntermediateCode;
import intermediate.SymbolTable;
import intermediate.SymbolTableEntry;
import intermediate.SymbolTableStack;
import java.io.IOException;
import java.util.ArrayList;
public class Parser {
protected SymbolTableStack symbolTableStack;
protected SymbolTable symbolTable;
protected ArrayList<IntermediateCode> topLevelLists;
protected Scanner scanner;
private int counter = 0;
private boolean initial = true;
public Parser(Scanner scanner) {
topLevelLists = new ArrayList<IntermediateCode>();
symbolTableStack = new SymbolTableStack();
symbolTable = new SymbolTable();
symbolTableStack.push(symbolTable);
this.scanner = scanner;
}
public Parser(SymbolTableStack symbolTableStack, Scanner scanner) {
this.symbolTableStack = symbolTableStack;
this.scanner = scanner;
topLevelLists = new ArrayList<IntermediateCode>();
symbolTable = new SymbolTable();
symbolTableStack.push(symbolTable);
}
public IntermediateCode parse() throws IOException {
System.out.println("\n----------Printing Tokens---------\n");
Token token = nextToken(); // Get first character
while (scanner.peekChar() != Source.EOF) {
IntermediateCode root = parseList();
topLevelLists.add(root);
}
return null;
}
public IntermediateCode parseList() throws IOException {
IntermediateCode rootNode = null;
try {
Token token = nextToken(); // Consume (
rootNode = new IntermediateCode();
IntermediateCode newNode;
SymbolTableEntry symbol;
switch (token.getType()) {
case LEFT_PAREN:
rootNode.setCar(parseList());
rootNode.setCdr(parseList());
break;
case DEFINE:
newNode = new IntermediateCode();
newNode.setText(token.getText());
rootNode.setCar(newNode);
newNode = new IntermediateCode();
rootNode.setCdr(newNode);
token = nextToken(); // Consume define
newNode.setCar(new IntermediateCode());
newNode.getCar().setText(token.getText());
symbol = new SymbolTableEntry(token.getText(), symbolTable);
symbolTable.put(token.getText(), symbol);
token = nextToken(); // Consume identifier
newNode.setCdr(parseList());
break;
case LAMBDA:
newNode = new IntermediateCode();
- newNode.setText(token.getText());
rootNode.setCar(newNode);
- newNode = new IntermediateCode();
- rootNode.setCdr(newNode);
+ newNode.setCar(new IntermediateCode());
+ newNode.getCar().setText(token.getText());
+ newNode.setCdr(new IntermediateCode());
+ newNode = newNode.getCdr();
+ newNode.setCar(new IntermediateCode());
+ newNode = newNode.getCar();
token = nextToken(); // Consume lambda
token = nextToken(); // Consume (
while (token.getType() == TokenType.REGULAR_SYMBOL) {
IntermediateCode temp = new IntermediateCode();
temp.setText(token.getText());
newNode.setCar(temp);
symbol = new SymbolTableEntry(token.getText(), symbolTable);
symbolTable.put(token.getText(), symbol);
- token = nextToken();
+ token = nextToken(); // Consume identifier
if (token.getType() == TokenType.REGULAR_SYMBOL) {
newNode.setCdr(new IntermediateCode());
newNode = newNode.getCdr();
}
}
token = nextToken(); // Consume )
- rootNode.getCdr().setCdr(parseList());
+ rootNode.getCar().getCdr().setCdr(parseList());
break;
case LET:
newNode = new IntermediateCode();
- newNode.setText(token.getText());
rootNode.setCar(newNode);
+ newNode.setCar(new IntermediateCode());
+ newNode.getCar().setText(token.getText());
token = nextToken(); // Consume let
- rootNode.setCdr(parseList());
+ newNode.setCdr(new IntermediateCode());
+ newNode = newNode.getCdr();
+ newNode.setCar(parseList());
+ newNode.setCdr(parseList());
break;
case RESERVED_SYMBOL:
case REGULAR_SYMBOL:
symbol = new SymbolTableEntry(token.getText(), symbolTable);
symbolTable.put(token.getText(), symbol);
// Do something
break;
default:
// Do something else if not one of the above
}
} catch (IOException ex) { ex.printStackTrace(); }
return rootNode;
}
/*
public IntermediateCode parseList() {
IntermediateCode newNode = null;
if(!initial && counter == 0) {
initial = true;
return null;
}
initial = false;
try {
Token token = nextToken(); // Consume (
System.out.print("\t" + token.getText() + "\t");
if (TokenType.RESERVED_WORDS.containsKey(token.getText())) {
System.out.println("Reserved Word");
}
else if (TokenType.RESERVED_SYMBOLS.containsKey(token.getText())) {
System.out.println("Reserved Symbol");
}
else if (token.getType() == TokenType.REGULAR_SYMBOL) {
System.out.println("Symbol");
}
else if (token.getType() == TokenType.INTEGER) {
System.out.println("Integer");
}
else if (token.getType() == TokenType.REAL) {
System.out.println("Real");
}
if (scanner.getPosition() == 0) {
System.out.println(scanner);
}
switch (token.getType()) {
case LEFT_PAREN:
counter++;
newNode = new IntermediateCode();
newNode.setCar(parseList());
newNode.setCdr(parseList());
break;
case RIGHT_PAREN:
counter--;
case END_OF_FILE:
break;
case REGULAR_SYMBOL:
SymbolTableEntry entry = new SymbolTableEntry(token.getText(), symbolTable);
symbolTable.put(token.getText(), entry);
default:
newNode = new IntermediateCode();
newNode.setText(token.getText());
newNode.setType(token.getType());
newNode.setCdr(parseList());
}
}
catch (IOException e) {
e.printStackTrace();
}
return newNode;
}
*/
public Scanner getScanner() {
return scanner;
}
public ArrayList<IntermediateCode> getICodes() {
return topLevelLists;
}
public SymbolTableStack getSymTabStack() {
return symbolTableStack;
}
public Token currentToken() {
return scanner.currentToken();
}
public Token nextToken() throws IOException {
return scanner.nextToken();
}
}
| false | true | public IntermediateCode parseList() throws IOException {
IntermediateCode rootNode = null;
try {
Token token = nextToken(); // Consume (
rootNode = new IntermediateCode();
IntermediateCode newNode;
SymbolTableEntry symbol;
switch (token.getType()) {
case LEFT_PAREN:
rootNode.setCar(parseList());
rootNode.setCdr(parseList());
break;
case DEFINE:
newNode = new IntermediateCode();
newNode.setText(token.getText());
rootNode.setCar(newNode);
newNode = new IntermediateCode();
rootNode.setCdr(newNode);
token = nextToken(); // Consume define
newNode.setCar(new IntermediateCode());
newNode.getCar().setText(token.getText());
symbol = new SymbolTableEntry(token.getText(), symbolTable);
symbolTable.put(token.getText(), symbol);
token = nextToken(); // Consume identifier
newNode.setCdr(parseList());
break;
case LAMBDA:
newNode = new IntermediateCode();
newNode.setText(token.getText());
rootNode.setCar(newNode);
newNode = new IntermediateCode();
rootNode.setCdr(newNode);
token = nextToken(); // Consume lambda
token = nextToken(); // Consume (
while (token.getType() == TokenType.REGULAR_SYMBOL) {
IntermediateCode temp = new IntermediateCode();
temp.setText(token.getText());
newNode.setCar(temp);
symbol = new SymbolTableEntry(token.getText(), symbolTable);
symbolTable.put(token.getText(), symbol);
token = nextToken();
if (token.getType() == TokenType.REGULAR_SYMBOL) {
newNode.setCdr(new IntermediateCode());
newNode = newNode.getCdr();
}
}
token = nextToken(); // Consume )
rootNode.getCdr().setCdr(parseList());
break;
case LET:
newNode = new IntermediateCode();
newNode.setText(token.getText());
rootNode.setCar(newNode);
token = nextToken(); // Consume let
rootNode.setCdr(parseList());
break;
case RESERVED_SYMBOL:
case REGULAR_SYMBOL:
symbol = new SymbolTableEntry(token.getText(), symbolTable);
symbolTable.put(token.getText(), symbol);
// Do something
break;
default:
// Do something else if not one of the above
}
} catch (IOException ex) { ex.printStackTrace(); }
return rootNode;
}
| public IntermediateCode parseList() throws IOException {
IntermediateCode rootNode = null;
try {
Token token = nextToken(); // Consume (
rootNode = new IntermediateCode();
IntermediateCode newNode;
SymbolTableEntry symbol;
switch (token.getType()) {
case LEFT_PAREN:
rootNode.setCar(parseList());
rootNode.setCdr(parseList());
break;
case DEFINE:
newNode = new IntermediateCode();
newNode.setText(token.getText());
rootNode.setCar(newNode);
newNode = new IntermediateCode();
rootNode.setCdr(newNode);
token = nextToken(); // Consume define
newNode.setCar(new IntermediateCode());
newNode.getCar().setText(token.getText());
symbol = new SymbolTableEntry(token.getText(), symbolTable);
symbolTable.put(token.getText(), symbol);
token = nextToken(); // Consume identifier
newNode.setCdr(parseList());
break;
case LAMBDA:
newNode = new IntermediateCode();
rootNode.setCar(newNode);
newNode.setCar(new IntermediateCode());
newNode.getCar().setText(token.getText());
newNode.setCdr(new IntermediateCode());
newNode = newNode.getCdr();
newNode.setCar(new IntermediateCode());
newNode = newNode.getCar();
token = nextToken(); // Consume lambda
token = nextToken(); // Consume (
while (token.getType() == TokenType.REGULAR_SYMBOL) {
IntermediateCode temp = new IntermediateCode();
temp.setText(token.getText());
newNode.setCar(temp);
symbol = new SymbolTableEntry(token.getText(), symbolTable);
symbolTable.put(token.getText(), symbol);
token = nextToken(); // Consume identifier
if (token.getType() == TokenType.REGULAR_SYMBOL) {
newNode.setCdr(new IntermediateCode());
newNode = newNode.getCdr();
}
}
token = nextToken(); // Consume )
rootNode.getCar().getCdr().setCdr(parseList());
break;
case LET:
newNode = new IntermediateCode();
rootNode.setCar(newNode);
newNode.setCar(new IntermediateCode());
newNode.getCar().setText(token.getText());
token = nextToken(); // Consume let
newNode.setCdr(new IntermediateCode());
newNode = newNode.getCdr();
newNode.setCar(parseList());
newNode.setCdr(parseList());
break;
case RESERVED_SYMBOL:
case REGULAR_SYMBOL:
symbol = new SymbolTableEntry(token.getText(), symbolTable);
symbolTable.put(token.getText(), symbol);
// Do something
break;
default:
// Do something else if not one of the above
}
} catch (IOException ex) { ex.printStackTrace(); }
return rootNode;
}
|
diff --git a/CapstoneProject/src/java/capstone/server/FriendManager.java b/CapstoneProject/src/java/capstone/server/FriendManager.java
index 0688045..f72ac11 100644
--- a/CapstoneProject/src/java/capstone/server/FriendManager.java
+++ b/CapstoneProject/src/java/capstone/server/FriendManager.java
@@ -1,132 +1,132 @@
/*
* To change this template, choose Tools | Templates
* and open the template in the editor.
*/
package capstone.server;
import java.io.IOException;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
/**
*
* @author lowkeylukey
*/
public class FriendManager extends HttpServlet {
/**
* Processes requests for both HTTP
* <code>GET</code> and
* <code>POST</code> methods.
*
* @param request servlet request
* @param response servlet response
* @throws ServletException if a servlet-specific error occurs
* @throws IOException if an I/O error occurs
*/
protected void processRequest(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
if(request.getParameter("form").equals("add")) {
String player = request.getParameter("player");
String friend = request.getParameter("friend");
//check to see if friend exists
if(!databaseAccess.playerExists(friend))
{
- String message = "Player " + friend + " does not exist";
+ String message = "There is no player with the user name" + friend;
this.getServletContext().getRequestDispatcher("/accountManagement.jsp?requestmessage="+message).forward(request, response);
return;
}
if(player == friend)
{
String message = "Cannot add yourself as a friend";
this.getServletContext().getRequestDispatcher("/accountManagement.jsp?requestmessage="+message).forward(request, response);
return;
}
//check to see if already friends
if(databaseAccess.areFriends(player, friend)) {
String message = "You are already friends";
this.getServletContext().getRequestDispatcher("/accountManagement.jsp?requestmessage="+message).forward(request, response);
return;
}
if(databaseAccess.addFriend(player, friend)) {
String message = "Friend request sent";
this.getServletContext().getRequestDispatcher("/accountManagement.jsp?requestmessage="+message).forward(request, response);
} else {
String message = "Friend request could not be sent";
this.getServletContext().getRequestDispatcher("/accountManagement.jsp?requestmessage="+message).forward(request, response);
}
} else if (request.getParameter("form").equals("requests")) {
if(request.getParameter("submit").equals("Accept Request")) {
String player = request.getParameter("player");
String friend = request.getParameter("friendRequestsField");
if(databaseAccess.acceptFriend(player, friend)) {
String message = "Friend accepted";
this.getServletContext().getRequestDispatcher("/accountManagement.jsp?requestmessage="+message).forward(request, response);
} else {
String message = "Error: Please try again";
this.getServletContext().getRequestDispatcher("/accountManagement.jsp?requestmessage="+message).forward(request, response);
}
} else if (request.getParameter("submit").equals("Decline Request")) {
String player = request.getParameter("player");
String friend = request.getParameter("friendRequestsField");
if(databaseAccess.declineRequest(player, friend)) {
this.getServletContext().getRequestDispatcher("/accountManagement.jsp").forward(request, response);
} else {
String message = "Error: Please try again";
this.getServletContext().getRequestDispatcher("/accountManagement.jsp?requestmessage="+message).forward(request, response);
}
}
} else if (request.getParameter("form").equals("friends")) {
String player = request.getParameter("player");
String friend = request.getParameter("friendsField");
if(databaseAccess.removeFriend(player, friend)) {
this.getServletContext().getRequestDispatcher("/accountManagement.jsp").forward(request, response);
} else {
String message = "Error: Please try again";
this.getServletContext().getRequestDispatcher("/accountManagement.jsp?requestmessage="+message).forward(request, response);
}
}
}
// <editor-fold defaultstate="collapsed" desc="HttpServlet methods. Click on the + sign on the left to edit the code.">
/**
* Handles the HTTP
* <code>GET</code> method.
*
* @param request servlet request
* @param response servlet response
* @throws ServletException if a servlet-specific error occurs
* @throws IOException if an I/O error occurs
*/
@Override
protected void doGet(HttpServletRequest request, HttpServletResponse response)
throws ServletException, IOException {
processRequest(request, response);
}
/**
* Handles the HTTP
* <code>POST</code> method.
*
* @param request servlet request
* @param response servlet response
* @throws ServletException if a servlet-specific error occurs
* @throws IOException if an I/O error occurs
*/
@Override
protected void doPost(HttpServletRequest request, HttpServletResponse response)
throws ServletException, IOException {
processRequest(request, response);
}
/**
* Returns a short description of the servlet.
*
* @return a String containing servlet description
*/
@Override
public String getServletInfo() {
return "Short description";
}// </editor-fold>
}
| true | true | protected void processRequest(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
if(request.getParameter("form").equals("add")) {
String player = request.getParameter("player");
String friend = request.getParameter("friend");
//check to see if friend exists
if(!databaseAccess.playerExists(friend))
{
String message = "Player " + friend + " does not exist";
this.getServletContext().getRequestDispatcher("/accountManagement.jsp?requestmessage="+message).forward(request, response);
return;
}
if(player == friend)
{
String message = "Cannot add yourself as a friend";
this.getServletContext().getRequestDispatcher("/accountManagement.jsp?requestmessage="+message).forward(request, response);
return;
}
//check to see if already friends
if(databaseAccess.areFriends(player, friend)) {
String message = "You are already friends";
this.getServletContext().getRequestDispatcher("/accountManagement.jsp?requestmessage="+message).forward(request, response);
return;
}
if(databaseAccess.addFriend(player, friend)) {
String message = "Friend request sent";
this.getServletContext().getRequestDispatcher("/accountManagement.jsp?requestmessage="+message).forward(request, response);
} else {
String message = "Friend request could not be sent";
this.getServletContext().getRequestDispatcher("/accountManagement.jsp?requestmessage="+message).forward(request, response);
}
} else if (request.getParameter("form").equals("requests")) {
if(request.getParameter("submit").equals("Accept Request")) {
String player = request.getParameter("player");
String friend = request.getParameter("friendRequestsField");
if(databaseAccess.acceptFriend(player, friend)) {
String message = "Friend accepted";
this.getServletContext().getRequestDispatcher("/accountManagement.jsp?requestmessage="+message).forward(request, response);
} else {
String message = "Error: Please try again";
this.getServletContext().getRequestDispatcher("/accountManagement.jsp?requestmessage="+message).forward(request, response);
}
} else if (request.getParameter("submit").equals("Decline Request")) {
String player = request.getParameter("player");
String friend = request.getParameter("friendRequestsField");
if(databaseAccess.declineRequest(player, friend)) {
this.getServletContext().getRequestDispatcher("/accountManagement.jsp").forward(request, response);
} else {
String message = "Error: Please try again";
this.getServletContext().getRequestDispatcher("/accountManagement.jsp?requestmessage="+message).forward(request, response);
}
}
} else if (request.getParameter("form").equals("friends")) {
String player = request.getParameter("player");
String friend = request.getParameter("friendsField");
if(databaseAccess.removeFriend(player, friend)) {
this.getServletContext().getRequestDispatcher("/accountManagement.jsp").forward(request, response);
} else {
String message = "Error: Please try again";
this.getServletContext().getRequestDispatcher("/accountManagement.jsp?requestmessage="+message).forward(request, response);
}
}
}
| protected void processRequest(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
if(request.getParameter("form").equals("add")) {
String player = request.getParameter("player");
String friend = request.getParameter("friend");
//check to see if friend exists
if(!databaseAccess.playerExists(friend))
{
String message = "There is no player with the user name" + friend;
this.getServletContext().getRequestDispatcher("/accountManagement.jsp?requestmessage="+message).forward(request, response);
return;
}
if(player == friend)
{
String message = "Cannot add yourself as a friend";
this.getServletContext().getRequestDispatcher("/accountManagement.jsp?requestmessage="+message).forward(request, response);
return;
}
//check to see if already friends
if(databaseAccess.areFriends(player, friend)) {
String message = "You are already friends";
this.getServletContext().getRequestDispatcher("/accountManagement.jsp?requestmessage="+message).forward(request, response);
return;
}
if(databaseAccess.addFriend(player, friend)) {
String message = "Friend request sent";
this.getServletContext().getRequestDispatcher("/accountManagement.jsp?requestmessage="+message).forward(request, response);
} else {
String message = "Friend request could not be sent";
this.getServletContext().getRequestDispatcher("/accountManagement.jsp?requestmessage="+message).forward(request, response);
}
} else if (request.getParameter("form").equals("requests")) {
if(request.getParameter("submit").equals("Accept Request")) {
String player = request.getParameter("player");
String friend = request.getParameter("friendRequestsField");
if(databaseAccess.acceptFriend(player, friend)) {
String message = "Friend accepted";
this.getServletContext().getRequestDispatcher("/accountManagement.jsp?requestmessage="+message).forward(request, response);
} else {
String message = "Error: Please try again";
this.getServletContext().getRequestDispatcher("/accountManagement.jsp?requestmessage="+message).forward(request, response);
}
} else if (request.getParameter("submit").equals("Decline Request")) {
String player = request.getParameter("player");
String friend = request.getParameter("friendRequestsField");
if(databaseAccess.declineRequest(player, friend)) {
this.getServletContext().getRequestDispatcher("/accountManagement.jsp").forward(request, response);
} else {
String message = "Error: Please try again";
this.getServletContext().getRequestDispatcher("/accountManagement.jsp?requestmessage="+message).forward(request, response);
}
}
} else if (request.getParameter("form").equals("friends")) {
String player = request.getParameter("player");
String friend = request.getParameter("friendsField");
if(databaseAccess.removeFriend(player, friend)) {
this.getServletContext().getRequestDispatcher("/accountManagement.jsp").forward(request, response);
} else {
String message = "Error: Please try again";
this.getServletContext().getRequestDispatcher("/accountManagement.jsp?requestmessage="+message).forward(request, response);
}
}
}
|
diff --git a/src/com/csipsimple/wizards/impl/Pbxes.java b/src/com/csipsimple/wizards/impl/Pbxes.java
index d4ecaee3..8e975b67 100644
--- a/src/com/csipsimple/wizards/impl/Pbxes.java
+++ b/src/com/csipsimple/wizards/impl/Pbxes.java
@@ -1,65 +1,65 @@
/**
* Copyright (C) 2010-2012 Regis Montoya (aka r3gis - www.r3gis.fr)
* This file is part of CSipSimple.
*
* CSipSimple is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
* If you own a pjsip commercial license you can also redistribute it
* and/or modify it under the terms of the GNU Lesser General Public License
* as an android library.
*
* CSipSimple is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with CSipSimple. If not, see <http://www.gnu.org/licenses/>.
*/
package com.csipsimple.wizards.impl;
import com.csipsimple.api.SipConfigManager;
import com.csipsimple.api.SipProfile;
import com.csipsimple.utils.PreferencesWrapper;
public class Pbxes extends SimpleImplementation {
@Override
protected String getDomain() {
return "pbxes.org";
}
@Override
protected String getDefaultName() {
return "Pbxes.org";
}
@Override
public SipProfile buildAccount(SipProfile account) {
SipProfile acc = super.buildAccount(account);
- acc.vm_nbr = "*43";
+ acc.vm_nbr = "*97";
return acc;
}
@Override
public void setDefaultParams(PreferencesWrapper prefs) {
super.setDefaultParams(prefs);
// We need to change T1 value because pbxes.org drop registrations when retransmition are made by SIP client
prefs.setPreferenceStringValue(SipConfigManager.TSX_T1_TIMEOUT, "1000");
}
@Override
public boolean needRestart() {
return true;
}
@Override
protected boolean canTcp() {
return false; // Cause there is something really wrong on the pbxes.org server
}
}
| true | true | public SipProfile buildAccount(SipProfile account) {
SipProfile acc = super.buildAccount(account);
acc.vm_nbr = "*43";
return acc;
}
| public SipProfile buildAccount(SipProfile account) {
SipProfile acc = super.buildAccount(account);
acc.vm_nbr = "*97";
return acc;
}
|
diff --git a/adminshell/src/main/java/org/teiid/adminshell/MigrationUtil.java b/adminshell/src/main/java/org/teiid/adminshell/MigrationUtil.java
index d2ce6797bc..18938775ea 100644
--- a/adminshell/src/main/java/org/teiid/adminshell/MigrationUtil.java
+++ b/adminshell/src/main/java/org/teiid/adminshell/MigrationUtil.java
@@ -1,202 +1,197 @@
/*
* JBoss, Home of Professional Open Source.
* See the COPYRIGHT.txt file distributed with this work for information
* regarding copyright ownership. Some portions may be licensed
* to Red Hat, Inc. under one or more contributor license agreements.
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
* 02110-1301 USA.
*/
package org.teiid.adminshell;
import java.io.BufferedOutputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.util.Enumeration;
import java.util.zip.ZipEntry;
import java.util.zip.ZipFile;
import java.util.zip.ZipOutputStream;
import javax.xml.transform.Result;
import javax.xml.transform.Transformer;
import javax.xml.transform.TransformerConfigurationException;
import javax.xml.transform.TransformerException;
import javax.xml.transform.TransformerFactory;
import javax.xml.transform.TransformerFactoryConfigurationError;
import javax.xml.transform.stream.StreamResult;
import javax.xml.transform.stream.StreamSource;
import org.teiid.core.util.ApplicationInfo;
import org.teiid.core.util.FileUtils;
import org.teiid.core.util.ObjectConverterUtil;
@SuppressWarnings("nls")
public class MigrationUtil {
public static void main(String[] args) throws IOException, TransformerConfigurationException, TransformerFactoryConfigurationError, TransformerException {
if (args.length != 1) {
System.err.println(
"Teiid 7.0 VDB Migration Utility" +
"\n\nUsage:" +
"\n A vdb or .def file must be specified as the only argument." +
"\n\nResult:"+
"\n 7.0 compatible replacement files will be created in the same directory " +
"\n as your file." +
"\n If you supply a vdb, the new vdb file will have a _70.vdb suffix." +
- "\n If you supply a dynamic vdb file, then two new files will be created: " +
- "\n <file name>-vdb.xml and <file name>-bindings.xml" +
- "\n\nNote: this program will only create connector binding connection factories " +
- "\n if the bindings are present in the specified file." +
+ "\n If you supply a dynamic vdb file, <file name>-vdb.xml is created " +
+ "\n\nNote: This program will create translator names by Connector's Component Type name" +
+ "\n As they are not gureented to match; recheck their for their validity" +
"\n\nNote: this program will NOT create the -ds.xml files needed by JBoss to " +
"\n create underlying DataSource connection pools." +
"\n You will need to manually create one -ds.xml for each JDBC DataSource " +
"\n with a JNDI name of <connector binding name>DS, " +
- "\n where any spaces in the name are replace by _" +
- "\n\nNode: depending upon the connectors used, you may need to manually edit the " +
- " -bindings.xml file.");
+ "\n where any spaces in the name are replace by _");
System.exit(-1);
}
File file = new File(args[0]);
if (!file.exists()) {
System.err.println(args[0] + " does not exist."); //$NON-NLS-1$
System.exit(-1);
}
String fullName = file.getName();
String fileName = fullName.substring(0, fullName.length() - 4);
String ext = FileUtils.getExtension(file);
if (ext == null) {
System.err.println(fullName + " is not a vdb or xml file."); //$NON-NLS-1$
System.exit(-1);
}
ext = ext.toLowerCase();
if (ext.endsWith("vdb")) {
File dir = createTempDirectory();
try {
extract(file, dir);
File metainf = new File(dir, "META-INF");
File config = new File(dir, "ConfigurationInfo.def");
File manifest = new File(dir, "MetaMatrix-VdbManifestModel.xmi");
if (manifest.exists()) {
String configStr = ObjectConverterUtil.convertFileToString(config);
String manifestStr = ObjectConverterUtil.convertFileToString(manifest);
int index = configStr.lastIndexOf("</VDB>");
int manifestBegin = manifestStr.indexOf("<xmi");
configStr = configStr.substring(0, index) + manifestStr.substring(manifestBegin) + "</VDB>";
FileUtils.write(configStr.getBytes(), config);
manifest.delete();
}
transformConfig(config, "/vdb.xsl", new StreamResult(new File(metainf, "vdb.xml")));
- transformConfig(config, "/connector.xsl", new StreamResult(new File(file.getParentFile(), fileName + "-bindings-ds.xml")));
config.delete();
FileOutputStream out = new FileOutputStream(new File(file.getParent(), fileName + "_70.vdb"));
ZipOutputStream zos = new ZipOutputStream(new BufferedOutputStream(out));
int parentLength = dir.getPath().length();
addDirectory(dir, zos, parentLength);
zos.close();
} finally {
FileUtils.removeDirectoryAndChildren(dir);
}
} else if (ext.endsWith("xml") || ext.endsWith("def")){
File parent = file.getParentFile();
transformConfig(file, "/vdb.xsl", new StreamResult(new File(parent, fileName + "-vdb.xml")));
- transformConfig(file, "/connector.xsl", new StreamResult(new File(parent, fileName + "-bindings-ds.xml")));
} else {
System.err.println(fullName + " is not a vdb or xml file. Run with no arguments for help."); //$NON-NLS-1$
System.exit(-1);
}
}
private static void addDirectory(File dir, ZipOutputStream zos,
int parentLength) throws IOException {
String[] files = dir.list();
for (String entry : files) {
File f = new File(dir, entry);
if (f.isDirectory()) {
addDirectory(f, zos, parentLength);
} else {
ZipEntry e = new ZipEntry(f.getPath().substring(parentLength));
zos.putNextEntry(e);
FileUtils.write(f, zos);
zos.closeEntry();
}
}
}
private static void transformConfig(File config, String styleSheet, Result target)
throws TransformerFactoryConfigurationError,
TransformerConfigurationException, TransformerException {
TransformerFactory tf = TransformerFactory.newInstance();
Transformer t = tf.newTransformer(new StreamSource(MigrationUtil.class.getResourceAsStream(styleSheet)));
t.setParameter("version", ApplicationInfo.getInstance().getReleaseNumber()); //$NON-NLS-1$
t.transform(new StreamSource(config), target);
}
/**
* Extract the given zip file to the given destination directory base.
*
* @param zipFileName
* The full path and file name of the Zip file to extract.
* @param destinationDirectory
* The root directory to extract to.
* @throws IOException
*/
static void extract(final File sourceZipFile, File unzipDestinationDirectory) throws IOException {
// Open Zip file for reading
ZipFile zipFile = new ZipFile(sourceZipFile, ZipFile.OPEN_READ);
// Create an enumeration of the entries in the zip file
Enumeration zipFileEntries = zipFile.entries();
// Process each entry
while (zipFileEntries.hasMoreElements()) {
// grab a zip file entry
ZipEntry entry = (ZipEntry) zipFileEntries.nextElement();
String currentEntry = entry.getName();
File destFile = new File(unzipDestinationDirectory, currentEntry);
// grab file's parent directory structure
File destinationParent = destFile.getParentFile();
// create the parent directory structure if needed
destinationParent.mkdirs();
// extract file if not a directory
if (!entry.isDirectory()) {
ObjectConverterUtil.write(zipFile.getInputStream(entry),
destFile);
}
}
zipFile.close();
}
static File createTempDirectory() throws IOException {
File temp = File.createTempFile("temp", Long.toString(System.nanoTime()));
temp.delete();
if (!(temp.mkdir())) {
throw new IOException("Could not create temp directory: "
+ temp.getAbsolutePath());
}
return temp;
}
}
| false | true | public static void main(String[] args) throws IOException, TransformerConfigurationException, TransformerFactoryConfigurationError, TransformerException {
if (args.length != 1) {
System.err.println(
"Teiid 7.0 VDB Migration Utility" +
"\n\nUsage:" +
"\n A vdb or .def file must be specified as the only argument." +
"\n\nResult:"+
"\n 7.0 compatible replacement files will be created in the same directory " +
"\n as your file." +
"\n If you supply a vdb, the new vdb file will have a _70.vdb suffix." +
"\n If you supply a dynamic vdb file, then two new files will be created: " +
"\n <file name>-vdb.xml and <file name>-bindings.xml" +
"\n\nNote: this program will only create connector binding connection factories " +
"\n if the bindings are present in the specified file." +
"\n\nNote: this program will NOT create the -ds.xml files needed by JBoss to " +
"\n create underlying DataSource connection pools." +
"\n You will need to manually create one -ds.xml for each JDBC DataSource " +
"\n with a JNDI name of <connector binding name>DS, " +
"\n where any spaces in the name are replace by _" +
"\n\nNode: depending upon the connectors used, you may need to manually edit the " +
" -bindings.xml file.");
System.exit(-1);
}
File file = new File(args[0]);
if (!file.exists()) {
System.err.println(args[0] + " does not exist."); //$NON-NLS-1$
System.exit(-1);
}
String fullName = file.getName();
String fileName = fullName.substring(0, fullName.length() - 4);
String ext = FileUtils.getExtension(file);
if (ext == null) {
System.err.println(fullName + " is not a vdb or xml file."); //$NON-NLS-1$
System.exit(-1);
}
ext = ext.toLowerCase();
if (ext.endsWith("vdb")) {
File dir = createTempDirectory();
try {
extract(file, dir);
File metainf = new File(dir, "META-INF");
File config = new File(dir, "ConfigurationInfo.def");
File manifest = new File(dir, "MetaMatrix-VdbManifestModel.xmi");
if (manifest.exists()) {
String configStr = ObjectConverterUtil.convertFileToString(config);
String manifestStr = ObjectConverterUtil.convertFileToString(manifest);
int index = configStr.lastIndexOf("</VDB>");
int manifestBegin = manifestStr.indexOf("<xmi");
configStr = configStr.substring(0, index) + manifestStr.substring(manifestBegin) + "</VDB>";
FileUtils.write(configStr.getBytes(), config);
manifest.delete();
}
transformConfig(config, "/vdb.xsl", new StreamResult(new File(metainf, "vdb.xml")));
transformConfig(config, "/connector.xsl", new StreamResult(new File(file.getParentFile(), fileName + "-bindings-ds.xml")));
config.delete();
FileOutputStream out = new FileOutputStream(new File(file.getParent(), fileName + "_70.vdb"));
ZipOutputStream zos = new ZipOutputStream(new BufferedOutputStream(out));
int parentLength = dir.getPath().length();
addDirectory(dir, zos, parentLength);
zos.close();
} finally {
FileUtils.removeDirectoryAndChildren(dir);
}
} else if (ext.endsWith("xml") || ext.endsWith("def")){
File parent = file.getParentFile();
transformConfig(file, "/vdb.xsl", new StreamResult(new File(parent, fileName + "-vdb.xml")));
transformConfig(file, "/connector.xsl", new StreamResult(new File(parent, fileName + "-bindings-ds.xml")));
} else {
System.err.println(fullName + " is not a vdb or xml file. Run with no arguments for help."); //$NON-NLS-1$
System.exit(-1);
}
}
| public static void main(String[] args) throws IOException, TransformerConfigurationException, TransformerFactoryConfigurationError, TransformerException {
if (args.length != 1) {
System.err.println(
"Teiid 7.0 VDB Migration Utility" +
"\n\nUsage:" +
"\n A vdb or .def file must be specified as the only argument." +
"\n\nResult:"+
"\n 7.0 compatible replacement files will be created in the same directory " +
"\n as your file." +
"\n If you supply a vdb, the new vdb file will have a _70.vdb suffix." +
"\n If you supply a dynamic vdb file, <file name>-vdb.xml is created " +
"\n\nNote: This program will create translator names by Connector's Component Type name" +
"\n As they are not gureented to match; recheck their for their validity" +
"\n\nNote: this program will NOT create the -ds.xml files needed by JBoss to " +
"\n create underlying DataSource connection pools." +
"\n You will need to manually create one -ds.xml for each JDBC DataSource " +
"\n with a JNDI name of <connector binding name>DS, " +
"\n where any spaces in the name are replace by _");
System.exit(-1);
}
File file = new File(args[0]);
if (!file.exists()) {
System.err.println(args[0] + " does not exist."); //$NON-NLS-1$
System.exit(-1);
}
String fullName = file.getName();
String fileName = fullName.substring(0, fullName.length() - 4);
String ext = FileUtils.getExtension(file);
if (ext == null) {
System.err.println(fullName + " is not a vdb or xml file."); //$NON-NLS-1$
System.exit(-1);
}
ext = ext.toLowerCase();
if (ext.endsWith("vdb")) {
File dir = createTempDirectory();
try {
extract(file, dir);
File metainf = new File(dir, "META-INF");
File config = new File(dir, "ConfigurationInfo.def");
File manifest = new File(dir, "MetaMatrix-VdbManifestModel.xmi");
if (manifest.exists()) {
String configStr = ObjectConverterUtil.convertFileToString(config);
String manifestStr = ObjectConverterUtil.convertFileToString(manifest);
int index = configStr.lastIndexOf("</VDB>");
int manifestBegin = manifestStr.indexOf("<xmi");
configStr = configStr.substring(0, index) + manifestStr.substring(manifestBegin) + "</VDB>";
FileUtils.write(configStr.getBytes(), config);
manifest.delete();
}
transformConfig(config, "/vdb.xsl", new StreamResult(new File(metainf, "vdb.xml")));
config.delete();
FileOutputStream out = new FileOutputStream(new File(file.getParent(), fileName + "_70.vdb"));
ZipOutputStream zos = new ZipOutputStream(new BufferedOutputStream(out));
int parentLength = dir.getPath().length();
addDirectory(dir, zos, parentLength);
zos.close();
} finally {
FileUtils.removeDirectoryAndChildren(dir);
}
} else if (ext.endsWith("xml") || ext.endsWith("def")){
File parent = file.getParentFile();
transformConfig(file, "/vdb.xsl", new StreamResult(new File(parent, fileName + "-vdb.xml")));
} else {
System.err.println(fullName + " is not a vdb or xml file. Run with no arguments for help."); //$NON-NLS-1$
System.exit(-1);
}
}
|
diff --git a/jsf-ri/src/main/java/com/sun/faces/facelets/flow/MethodTagHandler.java b/jsf-ri/src/main/java/com/sun/faces/facelets/flow/MethodTagHandler.java
index af3cace65..b5ae0a1e4 100644
--- a/jsf-ri/src/main/java/com/sun/faces/facelets/flow/MethodTagHandler.java
+++ b/jsf-ri/src/main/java/com/sun/faces/facelets/flow/MethodTagHandler.java
@@ -1,72 +1,73 @@
/*
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS HEADER.
*
* Copyright (c) 1997-2012 Oracle and/or its affiliates. All rights reserved.
*
* The contents of this file are subject to the terms of either the GNU
* General Public License Version 2 only ("GPL") or the Common Development
* and Distribution License("CDDL") (collectively, the "License"). You
* may not use this file except in compliance with the License. You can
* obtain a copy of the License at
* https://glassfish.dev.java.net/public/CDDL+GPL_1_1.html
* or packager/legal/LICENSE.txt. See the License for the specific
* language governing permissions and limitations under the License.
*
* When distributing the software, include this License Header Notice in each
* file and include the License file at packager/legal/LICENSE.txt.
*
* GPL Classpath Exception:
* Oracle designates this particular file as subject to the "Classpath"
* exception as provided by Oracle in the GPL Version 2 section of the License
* file that accompanied this code.
*
* Modifications:
* If applicable, add the following below the License Header, with the fields
* enclosed by brackets [] replaced by your own identifying information:
* "Portions Copyright [year] [name of copyright owner]"
*
* Contributor(s):
* If you wish your version of this file to be governed by only the CDDL or
* only the GPL Version 2, indicate your decision by adding "[Contributor]
* elects to include this software in this distribution under the [CDDL or GPL
* Version 2] license." If you don't indicate a single choice of license, a
* recipient has the option to distribute your version of this file under
* either the CDDL, the GPL Version 2 or to extend the choice of license to
* its licensees as provided above. However, if you add GPL Version 2 code
* and therefore, elected the GPL Version 2 license, then the option applies
* only if the new code is made subject to such option by the copyright
* holder.
*/
package com.sun.faces.facelets.flow;
import com.sun.faces.facelets.tag.TagHandlerImpl;
import java.io.IOException;
import javax.el.ExpressionFactory;
import javax.el.MethodExpression;
import javax.faces.component.UIComponent;
import javax.faces.flow.MethodCallNode;
import javax.faces.view.facelets.FaceletContext;
import javax.faces.view.facelets.TagConfig;
public class MethodTagHandler extends TagHandlerImpl {
public MethodTagHandler(TagConfig config) {
super(config);
}
public void apply(FaceletContext ctx, UIComponent parent) throws IOException {
this.nextHandler.apply(ctx, parent);
if (MethodCallTagHandler.isWithinMethodCall(ctx)) {
MethodCallNode cur = MethodCallTagHandler.getCurrentMethodCall(ctx);
ExpressionFactory expressionFactory = ctx.getFacesContext().getApplication().getExpressionFactory();
String expressionString = this.nextHandler.toString();
- MethodExpression me = expressionFactory.createMethodExpression(ctx, expressionString, Object.class, null);
+ // PENDING(edburns) how do parameters work here?
+ MethodExpression me = expressionFactory.createMethodExpression(ctx, expressionString, Object.class, new Class [] {});
cur.setMethodExpression(me);
}
}
}
| true | true | public void apply(FaceletContext ctx, UIComponent parent) throws IOException {
this.nextHandler.apply(ctx, parent);
if (MethodCallTagHandler.isWithinMethodCall(ctx)) {
MethodCallNode cur = MethodCallTagHandler.getCurrentMethodCall(ctx);
ExpressionFactory expressionFactory = ctx.getFacesContext().getApplication().getExpressionFactory();
String expressionString = this.nextHandler.toString();
MethodExpression me = expressionFactory.createMethodExpression(ctx, expressionString, Object.class, null);
cur.setMethodExpression(me);
}
}
| public void apply(FaceletContext ctx, UIComponent parent) throws IOException {
this.nextHandler.apply(ctx, parent);
if (MethodCallTagHandler.isWithinMethodCall(ctx)) {
MethodCallNode cur = MethodCallTagHandler.getCurrentMethodCall(ctx);
ExpressionFactory expressionFactory = ctx.getFacesContext().getApplication().getExpressionFactory();
String expressionString = this.nextHandler.toString();
// PENDING(edburns) how do parameters work here?
MethodExpression me = expressionFactory.createMethodExpression(ctx, expressionString, Object.class, new Class [] {});
cur.setMethodExpression(me);
}
}
|
diff --git a/qcadoo-model/src/main/java/com/qcadoo/model/internal/file/FileServiceImpl.java b/qcadoo-model/src/main/java/com/qcadoo/model/internal/file/FileServiceImpl.java
index 95450ff03..fb4e654fd 100644
--- a/qcadoo-model/src/main/java/com/qcadoo/model/internal/file/FileServiceImpl.java
+++ b/qcadoo-model/src/main/java/com/qcadoo/model/internal/file/FileServiceImpl.java
@@ -1,216 +1,216 @@
/**
* ***************************************************************************
* Copyright (c) 2010 Qcadoo Limited
* Project: Qcadoo Framework
* Version: 1.1.2
*
* This file is part of Qcadoo.
*
* Qcadoo is free software; you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published
* by the Free Software Foundation; either version 3 of the License,
* or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty
* of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
* See the GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
* ***************************************************************************
*/
package com.qcadoo.model.internal.file;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.text.SimpleDateFormat;
import java.util.Date;
import javax.activation.MimetypesFileTypeMap;
import org.apache.commons.io.FileUtils;
import org.apache.commons.io.IOUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Service;
import org.springframework.util.StringUtils;
import org.springframework.web.multipart.MultipartFile;
import com.qcadoo.localization.api.utils.DateUtils;
import com.qcadoo.model.api.Entity;
import com.qcadoo.model.api.file.FileService;
import com.qcadoo.tenant.api.MultiTenantUtil;
@Service
public class FileServiceImpl implements FileService {
private static final Logger LOG = LoggerFactory.getLogger(FileServiceImpl.class);
private static FileService instance;
private final String fileUrlPrefix = "/files/";
private File uploadDirectory;
public FileServiceImpl() {
FileServiceImpl.setInstance(this);
}
@Value("${reportPath}")
public void setUploadDirectory(final String uploadDirectory) {
this.uploadDirectory = new File(uploadDirectory);
}
@Override
public String getName(final String path) {
if (!StringUtils.hasText(path)) {
return null;
}
return path.substring(path.lastIndexOf(File.separatorChar) + 15);
}
@Override
public String getLastModificationDate(final String path) {
if (!StringUtils.hasText(path)) {
return null;
}
Date date = new Date(Long.valueOf(path.substring(path.lastIndexOf(File.separatorChar) + 1,
path.lastIndexOf(File.separatorChar) + 14)));
return new SimpleDateFormat(DateUtils.DATE_FORMAT).format(date);
}
@Override
public String getUrl(final String path) {
if (!StringUtils.hasText(path)) {
return null;
}
return fileUrlPrefix + normalizeSeparators(path.substring(uploadDirectory.getAbsolutePath().length() + 1));
}
private String normalizeSeparators(final String string) {
if ("\\".equals(File.separator)) {
return string.replaceAll("\\\\", "/");
} else {
return string;
}
}
private String denormalizeSeparators(final String string) {
if ("\\".equals(File.separator)) {
return string.replaceAll("/", "\\\\");
} else {
return string;
}
}
@Override
public String getPathFromUrl(final String url) {
String denormalizedUrl = denormalizeSeparators(url);
return uploadDirectory.getAbsolutePath() + File.separator
+ denormalizedUrl.substring(denormalizedUrl.indexOf(File.separatorChar) + fileUrlPrefix.length() - 1);
}
@Override
public InputStream getInputStream(final String path) {
if (!StringUtils.hasText(path)) {
return null;
}
try {
return new FileInputStream(new File(path));
} catch (FileNotFoundException e) {
return null;
}
}
@Override
public String upload(final MultipartFile multipartFile) throws IOException {
- File file = getFileFromFilename(multipartFile.getOriginalFilename());
+ File file = getFileFromFilenameWithRandomDirectory(multipartFile.getOriginalFilename());
OutputStream output = null;
try {
output = new FileOutputStream(file);
IOUtils.copy(multipartFile.getInputStream(), output);
} catch (IOException e) {
LOG.error(e.getMessage(), e);
IOUtils.closeQuietly(output);
throw e;
}
return file.getAbsolutePath();
}
@Override
public File createExportFile(final String filename) {
return getFileFromFilenameWithRandomDirectory(filename);
}
@Override
public File createReportFile(final String fileName) throws IOException {
return getFileFromFilename(fileName);
}
private File getFileFromFilename(final String filename) throws IOException {
File directory = new File(uploadDirectory, MultiTenantUtil.getCurrentTenantId() + File.separator);
try {
FileUtils.forceMkdir(directory);
} catch (IOException e) {
LOG.error(e.getMessage(), e);
throw e;
}
return new File(directory, getNormalizedFileName(filename.substring(filename.lastIndexOf(File.separator) + 1)));
}
private File getFileFromFilenameWithRandomDirectory(final String filename) {
String date = Long.toString(System.currentTimeMillis());
File directory = new File(uploadDirectory, MultiTenantUtil.getCurrentTenantId() + File.separator
+ date.charAt(date.length() - 1) + File.separator + date.charAt(date.length() - 2) + File.separator);
directory.mkdirs();
return new File(directory, date + "_" + getNormalizedFileName(filename));
}
private String getNormalizedFileName(final String filename) {
return filename.replaceAll("[^a-zA-Z0-9.]+", "_");
}
@Override
public Entity updateReportFileName(final Entity entity, final String dateFieldName, final String name) {
entity.setField("fileName", getReportFullPath(name, (Date) entity.getField(dateFieldName)));
return entity.getDataDefinition().save(entity);
}
private String getReportFullPath(final String name, final Date date) {
return getReportPath() + name + "_" + DateUtils.REPORT_D_T_F.format(date);
}
private String getReportPath() {
return uploadDirectory.getAbsolutePath() + File.separator + MultiTenantUtil.getCurrentTenantId() + File.separator;
}
@Override
public String getContentType(final String path) {
return new MimetypesFileTypeMap().getContentType(new File(path));
}
@Override
public void remove(final String path) {
FileUtils.deleteQuietly(new File(path));
}
public static FileService getInstance() {
return instance;
}
private static void setInstance(final FileService instance) {
FileServiceImpl.instance = instance;
}
}
| true | true | public String upload(final MultipartFile multipartFile) throws IOException {
File file = getFileFromFilename(multipartFile.getOriginalFilename());
OutputStream output = null;
try {
output = new FileOutputStream(file);
IOUtils.copy(multipartFile.getInputStream(), output);
} catch (IOException e) {
LOG.error(e.getMessage(), e);
IOUtils.closeQuietly(output);
throw e;
}
return file.getAbsolutePath();
}
| public String upload(final MultipartFile multipartFile) throws IOException {
File file = getFileFromFilenameWithRandomDirectory(multipartFile.getOriginalFilename());
OutputStream output = null;
try {
output = new FileOutputStream(file);
IOUtils.copy(multipartFile.getInputStream(), output);
} catch (IOException e) {
LOG.error(e.getMessage(), e);
IOUtils.closeQuietly(output);
throw e;
}
return file.getAbsolutePath();
}
|
diff --git a/konakart_gestpay/src/main/java/com/konakartadmin/modules/payment/gestpay/Gestpay.java b/konakart_gestpay/src/main/java/com/konakartadmin/modules/payment/gestpay/Gestpay.java
index 11e3279..7288c14 100644
--- a/konakart_gestpay/src/main/java/com/konakartadmin/modules/payment/gestpay/Gestpay.java
+++ b/konakart_gestpay/src/main/java/com/konakartadmin/modules/payment/gestpay/Gestpay.java
@@ -1,182 +1,182 @@
/**
*
*/
package com.konakartadmin.modules.payment.gestpay;
import java.util.Date;
import com.konakart.bl.modules.payment.gestpay.GestpayConstants;
import com.konakart.util.Utils;
import com.konakartadmin.app.KKConfiguration;
import com.konakartadmin.bl.KKAdminBase;
import com.konakartadmin.modules.PaymentModule;
/**
* @author lrkwz
*/
public class Gestpay extends PaymentModule {
/**
* @return the config key stub
*/
public String getConfigKeyStub() {
if (configKeyStub == null) {
setConfigKeyStub(super.getConfigKeyStub() + "_GESTPAY");
}
return configKeyStub;
}
public String getModuleTitle() {
return getMsgs().getString("MODULE_PAYMENT_GESTPAY_TEXT_TITLE");
}
/**
* @return the implementation filename
*/
public String getImplementationFileName() {
return "Gestpay";
}
/**
* @return an array of configuration values for this payment module
*/
public KKConfiguration[] getConfigs() {
if (configs == null) {
configs = new KKConfiguration[8];
}
if (configs[0] != null && !Utils.isBlank(configs[0].getConfigurationKey())) {
return configs;
}
Date now = KKAdminBase.getKonakartTimeStampDate();
int i = 0;
int groupId = 6;
configs[i] = new KKConfiguration(
/* title */"Gestpay Status", GestpayConstants.MODULE_PAYMENT_GESTPAY_STATUS,
/* value */"true",
/* description */"If set to false, the Gestpay module will be unavailable",
/* groupId */groupId,
/* sortO */i++,
/* useFun */"",
/* setFun */"tep_cfg_select_option(array('true', 'false'), ",
/* dateAdd */now);
configs[i] = new KKConfiguration(
/* title */"Sort order of display", GestpayConstants.MODULE_PAYMENT_GESTPAY_SORT_ORDER,
/* value */"0",
/* description */"Sort Order of Gestpay module on the UI. Lowest is displayed first.",
/* groupId */groupId,
/* sortO */i++,
/* useFun */"",
/* setFun */"",
/* dateAdd */now);
configs[i] = new KKConfiguration(
/* title */"Gestpay Payment Zone", GestpayConstants.MODULE_PAYMENT_GESTPAY_ZONE,
/* value */"0",
/* description */"Zone where Gestpay module can be used. Otherwise it is disabled.",
/* groupId */groupId,
/* sortO */i++,
/* useFun */"tep_get_zone_class_title",
/* setFun */"tep_cfg_pull_down_zone_classes(",
/* dateAdd */now);
configs[i] = new KKConfiguration(
/* title */"Callback username (?)", GestpayConstants.MODULE_PAYMENT_GESTPAY_CALLBACK_USERNAME,
/* value */"[email protected]",
/* description */"Valid username for KonaKart. Used by the callback"
+ " code to log into KonaKart in order to make an engine call",
/* groupId */groupId,
/* sortO */i++,
/* useFun */"",
/* setFun */"",
/* dateAdd */now);
configs[i] = new KKConfiguration(
/* title */"Callback Password", GestpayConstants.MODULE_PAYMENT_GESTPAY_CALLBACK_PASSWORD,
/* value */"customerpassword",
/* description */"Valid password for KonaKart. Used by the callback"
+ " code to log into KonaKart in order to make an engine call",
/* groupId */groupId,
/* sortO */i++,
/* useFun */"",
/* setFun */"",
/* dateAdd */now);
configs[i] = new KKConfiguration(
/* title */"Request URL", GestpayConstants.MODULE_PAYMENT_GESTPAY_REQUEST_URL,
/* value */"https://ecomm.sella.it/gestpay/pagam.asp",
/* description */"URL used by KonaKart to send the transaction details (usually https://ecomm.sella.it/gestpay/pagam.asp for production use, https://testecomm.sella.it/gestpay/pagam.asp for test mode)",
/* groupId */groupId,
/* sortO */i++,
/* useFun */"",
/* setFun */"",
/* dateAdd */now);
configs[i] = new KKConfiguration(
/* title */"Send buyer name", GestpayConstants.MODULE_PAYMENT_GESTPAY_SEND_BUYER_NAME,
/* value */ "true",
/* description */"Depending on sella contract the payment server can accept the buyer name",
/* groupId */groupId,
/* sortO */i++,
/* useFun */"",
/* setFun */"tep_cfg_select_option(array('true', 'false'), ",
/* dateAdd */now);
configs[i] = new KKConfiguration(
/* title */"Send buyer email", GestpayConstants.MODULE_PAYMENT_GESTPAY_SEND_BUYER_EMAIL,
/* value */ "true",
/* description */"Depending on sella contract the payment server can accept the buyer email",
/* groupId */groupId,
/* sortO */i++,
/* useFun */"",
/* setFun */"tep_cfg_select_option(array('true', 'false'), ",
/* dateAdd */now);
configs[i] = new KKConfiguration(
- /* title */"Is the shop in test mode ?", GestpayConstants.MODULE_PAYMENT_GESTPAY_SEND_BUYER_LANGUAGE,
+ /* title */"Send buyer language", GestpayConstants.MODULE_PAYMENT_GESTPAY_SEND_BUYER_LANGUAGE,
/* value */ "false",
/* description */"True for multi-language sella account",
/* groupId */groupId,
/* sortO */i++,
/* useFun */"",
/* setFun */"tep_cfg_select_option(array('true', 'false'), ",
/* dateAdd */now);
configs[i] = new KKConfiguration(
- /* title */"Is the shop in test mode ?", GestpayConstants.MODULE_PAYMENT_GESTPAY_SEND_CUSTOMINFO,
+ /* title */"Send custom info", GestpayConstants.MODULE_PAYMENT_GESTPAY_SEND_CUSTOMINFO,
/* value */ "false",
/* description */"Depending on sella contract the payment server can accept custom info (i.e the billing country)",
/* groupId */groupId,
/* sortO */i++,
/* useFun */"",
/* setFun */"tep_cfg_select_option(array('true', 'false'), ",
/* dateAdd */now);
configs[i] = new KKConfiguration(
- /* title */"Is the shop in test mode ?", GestpayConstants.MODULE_PAYMENT_GESTPAY_CURRENCY,
+ /* title */"Set currency", GestpayConstants.MODULE_PAYMENT_GESTPAY_CURRENCY,
/* value */ "false",
/* description */"True for multi currency sella account",
/* groupId */groupId,
/* sortO */i++,
/* useFun */"",
/* setFun */"tep_cfg_select_option(array('true', 'false'), ",
/* dateAdd */now);
configs[i] = new KKConfiguration(
/* title */"GESTPAY Shop Id", GestpayConstants.MODULE_PAYMENT_GESTPAY_SHOP_ID,
/* value */"GESPAY47826",
/* description */"The GESTPAY shop ID for this installation",
/* groupId */groupId,
/* sortO */i++,
/* useFun */"",
/* setFun */"",
/* dateAdd */now);
return configs;
}
}
| false | true | public KKConfiguration[] getConfigs() {
if (configs == null) {
configs = new KKConfiguration[8];
}
if (configs[0] != null && !Utils.isBlank(configs[0].getConfigurationKey())) {
return configs;
}
Date now = KKAdminBase.getKonakartTimeStampDate();
int i = 0;
int groupId = 6;
configs[i] = new KKConfiguration(
/* title */"Gestpay Status", GestpayConstants.MODULE_PAYMENT_GESTPAY_STATUS,
/* value */"true",
/* description */"If set to false, the Gestpay module will be unavailable",
/* groupId */groupId,
/* sortO */i++,
/* useFun */"",
/* setFun */"tep_cfg_select_option(array('true', 'false'), ",
/* dateAdd */now);
configs[i] = new KKConfiguration(
/* title */"Sort order of display", GestpayConstants.MODULE_PAYMENT_GESTPAY_SORT_ORDER,
/* value */"0",
/* description */"Sort Order of Gestpay module on the UI. Lowest is displayed first.",
/* groupId */groupId,
/* sortO */i++,
/* useFun */"",
/* setFun */"",
/* dateAdd */now);
configs[i] = new KKConfiguration(
/* title */"Gestpay Payment Zone", GestpayConstants.MODULE_PAYMENT_GESTPAY_ZONE,
/* value */"0",
/* description */"Zone where Gestpay module can be used. Otherwise it is disabled.",
/* groupId */groupId,
/* sortO */i++,
/* useFun */"tep_get_zone_class_title",
/* setFun */"tep_cfg_pull_down_zone_classes(",
/* dateAdd */now);
configs[i] = new KKConfiguration(
/* title */"Callback username (?)", GestpayConstants.MODULE_PAYMENT_GESTPAY_CALLBACK_USERNAME,
/* value */"[email protected]",
/* description */"Valid username for KonaKart. Used by the callback"
+ " code to log into KonaKart in order to make an engine call",
/* groupId */groupId,
/* sortO */i++,
/* useFun */"",
/* setFun */"",
/* dateAdd */now);
configs[i] = new KKConfiguration(
/* title */"Callback Password", GestpayConstants.MODULE_PAYMENT_GESTPAY_CALLBACK_PASSWORD,
/* value */"customerpassword",
/* description */"Valid password for KonaKart. Used by the callback"
+ " code to log into KonaKart in order to make an engine call",
/* groupId */groupId,
/* sortO */i++,
/* useFun */"",
/* setFun */"",
/* dateAdd */now);
configs[i] = new KKConfiguration(
/* title */"Request URL", GestpayConstants.MODULE_PAYMENT_GESTPAY_REQUEST_URL,
/* value */"https://ecomm.sella.it/gestpay/pagam.asp",
/* description */"URL used by KonaKart to send the transaction details (usually https://ecomm.sella.it/gestpay/pagam.asp for production use, https://testecomm.sella.it/gestpay/pagam.asp for test mode)",
/* groupId */groupId,
/* sortO */i++,
/* useFun */"",
/* setFun */"",
/* dateAdd */now);
configs[i] = new KKConfiguration(
/* title */"Send buyer name", GestpayConstants.MODULE_PAYMENT_GESTPAY_SEND_BUYER_NAME,
/* value */ "true",
/* description */"Depending on sella contract the payment server can accept the buyer name",
/* groupId */groupId,
/* sortO */i++,
/* useFun */"",
/* setFun */"tep_cfg_select_option(array('true', 'false'), ",
/* dateAdd */now);
configs[i] = new KKConfiguration(
/* title */"Send buyer email", GestpayConstants.MODULE_PAYMENT_GESTPAY_SEND_BUYER_EMAIL,
/* value */ "true",
/* description */"Depending on sella contract the payment server can accept the buyer email",
/* groupId */groupId,
/* sortO */i++,
/* useFun */"",
/* setFun */"tep_cfg_select_option(array('true', 'false'), ",
/* dateAdd */now);
configs[i] = new KKConfiguration(
/* title */"Is the shop in test mode ?", GestpayConstants.MODULE_PAYMENT_GESTPAY_SEND_BUYER_LANGUAGE,
/* value */ "false",
/* description */"True for multi-language sella account",
/* groupId */groupId,
/* sortO */i++,
/* useFun */"",
/* setFun */"tep_cfg_select_option(array('true', 'false'), ",
/* dateAdd */now);
configs[i] = new KKConfiguration(
/* title */"Is the shop in test mode ?", GestpayConstants.MODULE_PAYMENT_GESTPAY_SEND_CUSTOMINFO,
/* value */ "false",
/* description */"Depending on sella contract the payment server can accept custom info (i.e the billing country)",
/* groupId */groupId,
/* sortO */i++,
/* useFun */"",
/* setFun */"tep_cfg_select_option(array('true', 'false'), ",
/* dateAdd */now);
configs[i] = new KKConfiguration(
/* title */"Is the shop in test mode ?", GestpayConstants.MODULE_PAYMENT_GESTPAY_CURRENCY,
/* value */ "false",
/* description */"True for multi currency sella account",
/* groupId */groupId,
/* sortO */i++,
/* useFun */"",
/* setFun */"tep_cfg_select_option(array('true', 'false'), ",
/* dateAdd */now);
configs[i] = new KKConfiguration(
/* title */"GESTPAY Shop Id", GestpayConstants.MODULE_PAYMENT_GESTPAY_SHOP_ID,
/* value */"GESPAY47826",
/* description */"The GESTPAY shop ID for this installation",
/* groupId */groupId,
/* sortO */i++,
/* useFun */"",
/* setFun */"",
/* dateAdd */now);
return configs;
}
| public KKConfiguration[] getConfigs() {
if (configs == null) {
configs = new KKConfiguration[8];
}
if (configs[0] != null && !Utils.isBlank(configs[0].getConfigurationKey())) {
return configs;
}
Date now = KKAdminBase.getKonakartTimeStampDate();
int i = 0;
int groupId = 6;
configs[i] = new KKConfiguration(
/* title */"Gestpay Status", GestpayConstants.MODULE_PAYMENT_GESTPAY_STATUS,
/* value */"true",
/* description */"If set to false, the Gestpay module will be unavailable",
/* groupId */groupId,
/* sortO */i++,
/* useFun */"",
/* setFun */"tep_cfg_select_option(array('true', 'false'), ",
/* dateAdd */now);
configs[i] = new KKConfiguration(
/* title */"Sort order of display", GestpayConstants.MODULE_PAYMENT_GESTPAY_SORT_ORDER,
/* value */"0",
/* description */"Sort Order of Gestpay module on the UI. Lowest is displayed first.",
/* groupId */groupId,
/* sortO */i++,
/* useFun */"",
/* setFun */"",
/* dateAdd */now);
configs[i] = new KKConfiguration(
/* title */"Gestpay Payment Zone", GestpayConstants.MODULE_PAYMENT_GESTPAY_ZONE,
/* value */"0",
/* description */"Zone where Gestpay module can be used. Otherwise it is disabled.",
/* groupId */groupId,
/* sortO */i++,
/* useFun */"tep_get_zone_class_title",
/* setFun */"tep_cfg_pull_down_zone_classes(",
/* dateAdd */now);
configs[i] = new KKConfiguration(
/* title */"Callback username (?)", GestpayConstants.MODULE_PAYMENT_GESTPAY_CALLBACK_USERNAME,
/* value */"[email protected]",
/* description */"Valid username for KonaKart. Used by the callback"
+ " code to log into KonaKart in order to make an engine call",
/* groupId */groupId,
/* sortO */i++,
/* useFun */"",
/* setFun */"",
/* dateAdd */now);
configs[i] = new KKConfiguration(
/* title */"Callback Password", GestpayConstants.MODULE_PAYMENT_GESTPAY_CALLBACK_PASSWORD,
/* value */"customerpassword",
/* description */"Valid password for KonaKart. Used by the callback"
+ " code to log into KonaKart in order to make an engine call",
/* groupId */groupId,
/* sortO */i++,
/* useFun */"",
/* setFun */"",
/* dateAdd */now);
configs[i] = new KKConfiguration(
/* title */"Request URL", GestpayConstants.MODULE_PAYMENT_GESTPAY_REQUEST_URL,
/* value */"https://ecomm.sella.it/gestpay/pagam.asp",
/* description */"URL used by KonaKart to send the transaction details (usually https://ecomm.sella.it/gestpay/pagam.asp for production use, https://testecomm.sella.it/gestpay/pagam.asp for test mode)",
/* groupId */groupId,
/* sortO */i++,
/* useFun */"",
/* setFun */"",
/* dateAdd */now);
configs[i] = new KKConfiguration(
/* title */"Send buyer name", GestpayConstants.MODULE_PAYMENT_GESTPAY_SEND_BUYER_NAME,
/* value */ "true",
/* description */"Depending on sella contract the payment server can accept the buyer name",
/* groupId */groupId,
/* sortO */i++,
/* useFun */"",
/* setFun */"tep_cfg_select_option(array('true', 'false'), ",
/* dateAdd */now);
configs[i] = new KKConfiguration(
/* title */"Send buyer email", GestpayConstants.MODULE_PAYMENT_GESTPAY_SEND_BUYER_EMAIL,
/* value */ "true",
/* description */"Depending on sella contract the payment server can accept the buyer email",
/* groupId */groupId,
/* sortO */i++,
/* useFun */"",
/* setFun */"tep_cfg_select_option(array('true', 'false'), ",
/* dateAdd */now);
configs[i] = new KKConfiguration(
/* title */"Send buyer language", GestpayConstants.MODULE_PAYMENT_GESTPAY_SEND_BUYER_LANGUAGE,
/* value */ "false",
/* description */"True for multi-language sella account",
/* groupId */groupId,
/* sortO */i++,
/* useFun */"",
/* setFun */"tep_cfg_select_option(array('true', 'false'), ",
/* dateAdd */now);
configs[i] = new KKConfiguration(
/* title */"Send custom info", GestpayConstants.MODULE_PAYMENT_GESTPAY_SEND_CUSTOMINFO,
/* value */ "false",
/* description */"Depending on sella contract the payment server can accept custom info (i.e the billing country)",
/* groupId */groupId,
/* sortO */i++,
/* useFun */"",
/* setFun */"tep_cfg_select_option(array('true', 'false'), ",
/* dateAdd */now);
configs[i] = new KKConfiguration(
/* title */"Set currency", GestpayConstants.MODULE_PAYMENT_GESTPAY_CURRENCY,
/* value */ "false",
/* description */"True for multi currency sella account",
/* groupId */groupId,
/* sortO */i++,
/* useFun */"",
/* setFun */"tep_cfg_select_option(array('true', 'false'), ",
/* dateAdd */now);
configs[i] = new KKConfiguration(
/* title */"GESTPAY Shop Id", GestpayConstants.MODULE_PAYMENT_GESTPAY_SHOP_ID,
/* value */"GESPAY47826",
/* description */"The GESTPAY shop ID for this installation",
/* groupId */groupId,
/* sortO */i++,
/* useFun */"",
/* setFun */"",
/* dateAdd */now);
return configs;
}
|
diff --git a/src/org/openstreetmap/josm/gui/dialogs/ToggleDialog.java b/src/org/openstreetmap/josm/gui/dialogs/ToggleDialog.java
index 44216fb9..c1d5d10b 100644
--- a/src/org/openstreetmap/josm/gui/dialogs/ToggleDialog.java
+++ b/src/org/openstreetmap/josm/gui/dialogs/ToggleDialog.java
@@ -1,266 +1,270 @@
// License: GPL. Copyright 2007 by Immanuel Scholz and others
package org.openstreetmap.josm.gui.dialogs;
import static org.openstreetmap.josm.tools.I18n.tr;
import java.awt.BorderLayout;
import java.awt.Dimension;
import java.awt.EventQueue;
import java.awt.GridBagLayout;
import java.awt.Component;
import java.awt.Image;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.awt.event.ComponentAdapter;
import java.awt.event.ComponentEvent;
import java.awt.event.MouseEvent;
import java.awt.event.MouseListener;
import java.awt.event.WindowAdapter;
import java.awt.event.WindowEvent;
import javax.swing.AbstractButton;
import javax.swing.BorderFactory;
import javax.swing.Box;
import javax.swing.JButton;
import javax.swing.JDialog;
import javax.swing.JFrame;
import javax.swing.JLabel;
import javax.swing.JOptionPane;
import javax.swing.JPanel;
import javax.swing.ImageIcon;
import org.openstreetmap.josm.Main;
import org.openstreetmap.josm.actions.JosmAction;
import org.openstreetmap.josm.actions.HelpAction.Helpful;
import org.openstreetmap.josm.tools.GBC;
import org.openstreetmap.josm.tools.ImageProvider;
import org.openstreetmap.josm.tools.Shortcut;
/**
* This class is a toggle dialog that can be turned on and off. It is attached
* to a ButtonModel.
*
* @author imi
*/
public class ToggleDialog extends JPanel implements Helpful {
public final class ToggleDialogAction extends JosmAction {
public final String prefname;
public AbstractButton button;
private ToggleDialogAction(String name, String iconName, String tooltip, Shortcut shortcut, String prefname) {
super(name, iconName, tooltip, shortcut, false);
this.prefname = prefname;
}
public void actionPerformed(ActionEvent e) {
if (e != null && !(e.getSource() instanceof AbstractButton)) {
button.setSelected(!button.isSelected());
}
Boolean selected = button.isSelected();
setVisible(selected);
Main.pref.put(prefname+".visible", selected);
if(!selected && winadapter != null) {
winadapter.windowClosing(null);
} else if (!Main.pref.getBoolean(action.prefname+".docked", true)) {
EventQueue.invokeLater(new Runnable(){
public void run() {
stickyActionListener.actionPerformed(null);
}
});
}
}
}
/**
* The action to toggle this dialog.
*/
public ToggleDialogAction action;
public final String prefName;
public JPanel parent;
WindowAdapter winadapter;
private ActionListener stickyActionListener;
private final JPanel titleBar = new JPanel(new GridBagLayout());
public JLabel label = new JLabel();
public ToggleDialog(final String name, String iconName, String tooltip, Shortcut shortcut, int preferredHeight) {
super(new BorderLayout());
this.prefName = iconName;
ToggleDialogInit(name, iconName, tooltip, shortcut, preferredHeight);
}
private void ToggleDialogInit(final String name, String iconName, String tooltip, Shortcut shortcut, final int preferredHeight) {
setPreferredSize(new Dimension(330,preferredHeight));
action = new ToggleDialogAction(name, "dialogs/"+iconName, tooltip, shortcut, iconName);
String helpId = "Dialog/"+getClass().getName().substring(getClass().getName().lastIndexOf('.')+1);
action.putValue("help", helpId.substring(0, helpId.length()-6));
setLayout(new BorderLayout());
// show the minimize button
final JLabel minimize = new JLabel(ImageProvider.get("misc", "normal"));
titleBar.add(minimize);
// scale down the dialog icon
ImageIcon inIcon = ImageProvider.get("dialogs", iconName);
ImageIcon smallIcon = new ImageIcon(inIcon.getImage().getScaledInstance(16 , 16, Image.SCALE_SMOOTH));
JLabel firstPart = new JLabel(name, smallIcon, JLabel.TRAILING);
firstPart.setIconTextGap(8);
titleBar.add(firstPart, GBC.std());
titleBar.add(Box.createHorizontalGlue(),GBC.std().fill(GBC.HORIZONTAL));
final ActionListener hideActionListener = new ActionListener(){
public void actionPerformed(ActionEvent e) {
boolean nowVisible = false;
Component comps[] = getComponents();
for(int i=0; i<comps.length; i++)
{
if(comps[i] != titleBar)
{
if(comps[i].isVisible()) {
comps[i].setVisible(false);
} else {
comps[i].setVisible(true);
nowVisible = true;
}
}
}
Main.pref.put(action.prefname+".minimized", !nowVisible);
if(nowVisible == true) {
setPreferredSize(new Dimension(330,preferredHeight));
setMaximumSize(new Dimension(Short.MAX_VALUE, Short.MAX_VALUE));
minimize.setIcon(ImageProvider.get("misc", "normal"));
} else {
setPreferredSize(new Dimension(330,20));
setMaximumSize(new Dimension(330,20));
minimize.setIcon(ImageProvider.get("misc", "minimized"));
}
if(parent != null)
{
// doLayout() - workaround
parent.setVisible(false);
parent.setVisible(true);
}
}
};
//hide.addActionListener(hideActionListener);
final MouseListener titleMouseListener = new MouseListener(){
public void mouseClicked(MouseEvent e) {
hideActionListener.actionPerformed(null);
}
public void mouseEntered(MouseEvent e) {}
public void mouseExited(MouseEvent e) {}
public void mousePressed(MouseEvent e) {}
public void mouseReleased(MouseEvent e) {}
};
titleBar.addMouseListener(titleMouseListener);
// show the sticky button
JButton sticky = new JButton(ImageProvider.get("misc", "sticky"));
sticky.setToolTipText(tr("Undock the panel"));
sticky.setBorder(BorderFactory.createEmptyBorder());
stickyActionListener = new ActionListener(){
public void actionPerformed(ActionEvent e) {
final JDialog f = new JDialog(JOptionPane.getFrameForComponent(Main.parent),false /* not modal*/);
- parent.remove(ToggleDialog.this);
+ if (parent != null) {
+ parent.remove(ToggleDialog.this);
+ }
f.getContentPane().add(ToggleDialog.this);
f.addWindowListener((winadapter = new WindowAdapter(){
@Override public void windowClosing(WindowEvent e) {
f.getContentPane().removeAll();
f.dispose();
winadapter = null;
// doLayout() - workaround
setVisible(false);
parent.add(ToggleDialog.this);
if(Main.pref.getBoolean(action.prefname+".visible")) {
setVisible(true);
}
titleBar.setVisible(true);
if(e != null) {
Main.pref.put(action.prefname+".docked", true);
}
}
}));
f.addComponentListener(new ComponentAdapter(){
@Override public void componentMoved(ComponentEvent e) {
Main.pref.put(action.prefname+".bounds", f.getX()+","+f.getY()+","+f.getWidth()+","+f.getHeight());
}
});
String bounds = Main.pref.get(action.prefname+".bounds",null);
if (bounds != null) {
String[] b = bounds.split(",");
f.setBounds(Integer.parseInt(b[0]),Integer.parseInt(b[1]),Integer.parseInt(b[2]),Integer.parseInt(b[3]));
} else {
f.pack();
}
Main.pref.put(action.prefname+".docked", false);
f.setVisible(true);
titleBar.setVisible(false);
- // doLayout() - workaround
- parent.setVisible(false);
- parent.setVisible(true);
+ if (parent != null) {
+ // doLayout() - workaround
+ parent.setVisible(false);
+ parent.setVisible(true);
+ }
}
};
sticky.addActionListener(stickyActionListener);
titleBar.add(sticky);
// show the close button
JButton close = new JButton(ImageProvider.get("misc", "close"));
close.setToolTipText(tr("Close this panel. You can reopen it with the buttons in the left toolbar."));
close.setBorder(BorderFactory.createEmptyBorder());
final ActionListener closeActionListener = new ActionListener(){
public void actionPerformed(ActionEvent e) {
// fake an event to toggle dialog
action.actionPerformed(new ActionEvent(titleBar, 0, ""));
}
};
close.addActionListener(closeActionListener);
titleBar.add(close);
add(titleBar, BorderLayout.NORTH);
titleBar.setToolTipText(tr("Click to minimize/maximize the panel content"));
setVisible(false);
setBorder(BorderFactory.createEtchedBorder());
if (!Main.pref.getBoolean(action.prefname+".docked", true)) {
EventQueue.invokeLater(new Runnable(){
public void run() {
stickyActionListener.actionPerformed(null);
}
});
}
if (Main.pref.getBoolean(action.prefname+".minimized", false)) {
EventQueue.invokeLater(new Runnable(){
public void run() {
titleMouseListener.mouseClicked(null);
}
});
}
}
public void close()
{
if(winadapter != null) {
winadapter.windowClosing(null);
}
}
public void setTitle(String title, boolean active) {
if(active) {
label.setText("<html><b>" + title + "</b>");
} else {
label.setText(title);
}
}
public String helpTopic() {
String help = getClass().getName();
help = help.substring(help.lastIndexOf('.')+1, help.length()-6);
return "Dialog/"+help;
}
}
| false | true | private void ToggleDialogInit(final String name, String iconName, String tooltip, Shortcut shortcut, final int preferredHeight) {
setPreferredSize(new Dimension(330,preferredHeight));
action = new ToggleDialogAction(name, "dialogs/"+iconName, tooltip, shortcut, iconName);
String helpId = "Dialog/"+getClass().getName().substring(getClass().getName().lastIndexOf('.')+1);
action.putValue("help", helpId.substring(0, helpId.length()-6));
setLayout(new BorderLayout());
// show the minimize button
final JLabel minimize = new JLabel(ImageProvider.get("misc", "normal"));
titleBar.add(minimize);
// scale down the dialog icon
ImageIcon inIcon = ImageProvider.get("dialogs", iconName);
ImageIcon smallIcon = new ImageIcon(inIcon.getImage().getScaledInstance(16 , 16, Image.SCALE_SMOOTH));
JLabel firstPart = new JLabel(name, smallIcon, JLabel.TRAILING);
firstPart.setIconTextGap(8);
titleBar.add(firstPart, GBC.std());
titleBar.add(Box.createHorizontalGlue(),GBC.std().fill(GBC.HORIZONTAL));
final ActionListener hideActionListener = new ActionListener(){
public void actionPerformed(ActionEvent e) {
boolean nowVisible = false;
Component comps[] = getComponents();
for(int i=0; i<comps.length; i++)
{
if(comps[i] != titleBar)
{
if(comps[i].isVisible()) {
comps[i].setVisible(false);
} else {
comps[i].setVisible(true);
nowVisible = true;
}
}
}
Main.pref.put(action.prefname+".minimized", !nowVisible);
if(nowVisible == true) {
setPreferredSize(new Dimension(330,preferredHeight));
setMaximumSize(new Dimension(Short.MAX_VALUE, Short.MAX_VALUE));
minimize.setIcon(ImageProvider.get("misc", "normal"));
} else {
setPreferredSize(new Dimension(330,20));
setMaximumSize(new Dimension(330,20));
minimize.setIcon(ImageProvider.get("misc", "minimized"));
}
if(parent != null)
{
// doLayout() - workaround
parent.setVisible(false);
parent.setVisible(true);
}
}
};
//hide.addActionListener(hideActionListener);
final MouseListener titleMouseListener = new MouseListener(){
public void mouseClicked(MouseEvent e) {
hideActionListener.actionPerformed(null);
}
public void mouseEntered(MouseEvent e) {}
public void mouseExited(MouseEvent e) {}
public void mousePressed(MouseEvent e) {}
public void mouseReleased(MouseEvent e) {}
};
titleBar.addMouseListener(titleMouseListener);
// show the sticky button
JButton sticky = new JButton(ImageProvider.get("misc", "sticky"));
sticky.setToolTipText(tr("Undock the panel"));
sticky.setBorder(BorderFactory.createEmptyBorder());
stickyActionListener = new ActionListener(){
public void actionPerformed(ActionEvent e) {
final JDialog f = new JDialog(JOptionPane.getFrameForComponent(Main.parent),false /* not modal*/);
parent.remove(ToggleDialog.this);
f.getContentPane().add(ToggleDialog.this);
f.addWindowListener((winadapter = new WindowAdapter(){
@Override public void windowClosing(WindowEvent e) {
f.getContentPane().removeAll();
f.dispose();
winadapter = null;
// doLayout() - workaround
setVisible(false);
parent.add(ToggleDialog.this);
if(Main.pref.getBoolean(action.prefname+".visible")) {
setVisible(true);
}
titleBar.setVisible(true);
if(e != null) {
Main.pref.put(action.prefname+".docked", true);
}
}
}));
f.addComponentListener(new ComponentAdapter(){
@Override public void componentMoved(ComponentEvent e) {
Main.pref.put(action.prefname+".bounds", f.getX()+","+f.getY()+","+f.getWidth()+","+f.getHeight());
}
});
String bounds = Main.pref.get(action.prefname+".bounds",null);
if (bounds != null) {
String[] b = bounds.split(",");
f.setBounds(Integer.parseInt(b[0]),Integer.parseInt(b[1]),Integer.parseInt(b[2]),Integer.parseInt(b[3]));
} else {
f.pack();
}
Main.pref.put(action.prefname+".docked", false);
f.setVisible(true);
titleBar.setVisible(false);
// doLayout() - workaround
parent.setVisible(false);
parent.setVisible(true);
}
};
sticky.addActionListener(stickyActionListener);
titleBar.add(sticky);
// show the close button
JButton close = new JButton(ImageProvider.get("misc", "close"));
close.setToolTipText(tr("Close this panel. You can reopen it with the buttons in the left toolbar."));
close.setBorder(BorderFactory.createEmptyBorder());
final ActionListener closeActionListener = new ActionListener(){
public void actionPerformed(ActionEvent e) {
// fake an event to toggle dialog
action.actionPerformed(new ActionEvent(titleBar, 0, ""));
}
};
close.addActionListener(closeActionListener);
titleBar.add(close);
add(titleBar, BorderLayout.NORTH);
titleBar.setToolTipText(tr("Click to minimize/maximize the panel content"));
setVisible(false);
setBorder(BorderFactory.createEtchedBorder());
if (!Main.pref.getBoolean(action.prefname+".docked", true)) {
EventQueue.invokeLater(new Runnable(){
public void run() {
stickyActionListener.actionPerformed(null);
}
});
}
if (Main.pref.getBoolean(action.prefname+".minimized", false)) {
EventQueue.invokeLater(new Runnable(){
public void run() {
titleMouseListener.mouseClicked(null);
}
});
}
}
| private void ToggleDialogInit(final String name, String iconName, String tooltip, Shortcut shortcut, final int preferredHeight) {
setPreferredSize(new Dimension(330,preferredHeight));
action = new ToggleDialogAction(name, "dialogs/"+iconName, tooltip, shortcut, iconName);
String helpId = "Dialog/"+getClass().getName().substring(getClass().getName().lastIndexOf('.')+1);
action.putValue("help", helpId.substring(0, helpId.length()-6));
setLayout(new BorderLayout());
// show the minimize button
final JLabel minimize = new JLabel(ImageProvider.get("misc", "normal"));
titleBar.add(minimize);
// scale down the dialog icon
ImageIcon inIcon = ImageProvider.get("dialogs", iconName);
ImageIcon smallIcon = new ImageIcon(inIcon.getImage().getScaledInstance(16 , 16, Image.SCALE_SMOOTH));
JLabel firstPart = new JLabel(name, smallIcon, JLabel.TRAILING);
firstPart.setIconTextGap(8);
titleBar.add(firstPart, GBC.std());
titleBar.add(Box.createHorizontalGlue(),GBC.std().fill(GBC.HORIZONTAL));
final ActionListener hideActionListener = new ActionListener(){
public void actionPerformed(ActionEvent e) {
boolean nowVisible = false;
Component comps[] = getComponents();
for(int i=0; i<comps.length; i++)
{
if(comps[i] != titleBar)
{
if(comps[i].isVisible()) {
comps[i].setVisible(false);
} else {
comps[i].setVisible(true);
nowVisible = true;
}
}
}
Main.pref.put(action.prefname+".minimized", !nowVisible);
if(nowVisible == true) {
setPreferredSize(new Dimension(330,preferredHeight));
setMaximumSize(new Dimension(Short.MAX_VALUE, Short.MAX_VALUE));
minimize.setIcon(ImageProvider.get("misc", "normal"));
} else {
setPreferredSize(new Dimension(330,20));
setMaximumSize(new Dimension(330,20));
minimize.setIcon(ImageProvider.get("misc", "minimized"));
}
if(parent != null)
{
// doLayout() - workaround
parent.setVisible(false);
parent.setVisible(true);
}
}
};
//hide.addActionListener(hideActionListener);
final MouseListener titleMouseListener = new MouseListener(){
public void mouseClicked(MouseEvent e) {
hideActionListener.actionPerformed(null);
}
public void mouseEntered(MouseEvent e) {}
public void mouseExited(MouseEvent e) {}
public void mousePressed(MouseEvent e) {}
public void mouseReleased(MouseEvent e) {}
};
titleBar.addMouseListener(titleMouseListener);
// show the sticky button
JButton sticky = new JButton(ImageProvider.get("misc", "sticky"));
sticky.setToolTipText(tr("Undock the panel"));
sticky.setBorder(BorderFactory.createEmptyBorder());
stickyActionListener = new ActionListener(){
public void actionPerformed(ActionEvent e) {
final JDialog f = new JDialog(JOptionPane.getFrameForComponent(Main.parent),false /* not modal*/);
if (parent != null) {
parent.remove(ToggleDialog.this);
}
f.getContentPane().add(ToggleDialog.this);
f.addWindowListener((winadapter = new WindowAdapter(){
@Override public void windowClosing(WindowEvent e) {
f.getContentPane().removeAll();
f.dispose();
winadapter = null;
// doLayout() - workaround
setVisible(false);
parent.add(ToggleDialog.this);
if(Main.pref.getBoolean(action.prefname+".visible")) {
setVisible(true);
}
titleBar.setVisible(true);
if(e != null) {
Main.pref.put(action.prefname+".docked", true);
}
}
}));
f.addComponentListener(new ComponentAdapter(){
@Override public void componentMoved(ComponentEvent e) {
Main.pref.put(action.prefname+".bounds", f.getX()+","+f.getY()+","+f.getWidth()+","+f.getHeight());
}
});
String bounds = Main.pref.get(action.prefname+".bounds",null);
if (bounds != null) {
String[] b = bounds.split(",");
f.setBounds(Integer.parseInt(b[0]),Integer.parseInt(b[1]),Integer.parseInt(b[2]),Integer.parseInt(b[3]));
} else {
f.pack();
}
Main.pref.put(action.prefname+".docked", false);
f.setVisible(true);
titleBar.setVisible(false);
if (parent != null) {
// doLayout() - workaround
parent.setVisible(false);
parent.setVisible(true);
}
}
};
sticky.addActionListener(stickyActionListener);
titleBar.add(sticky);
// show the close button
JButton close = new JButton(ImageProvider.get("misc", "close"));
close.setToolTipText(tr("Close this panel. You can reopen it with the buttons in the left toolbar."));
close.setBorder(BorderFactory.createEmptyBorder());
final ActionListener closeActionListener = new ActionListener(){
public void actionPerformed(ActionEvent e) {
// fake an event to toggle dialog
action.actionPerformed(new ActionEvent(titleBar, 0, ""));
}
};
close.addActionListener(closeActionListener);
titleBar.add(close);
add(titleBar, BorderLayout.NORTH);
titleBar.setToolTipText(tr("Click to minimize/maximize the panel content"));
setVisible(false);
setBorder(BorderFactory.createEtchedBorder());
if (!Main.pref.getBoolean(action.prefname+".docked", true)) {
EventQueue.invokeLater(new Runnable(){
public void run() {
stickyActionListener.actionPerformed(null);
}
});
}
if (Main.pref.getBoolean(action.prefname+".minimized", false)) {
EventQueue.invokeLater(new Runnable(){
public void run() {
titleMouseListener.mouseClicked(null);
}
});
}
}
|
diff --git a/gr.sch.ira.minoas/src/gr/sch/ira/minoas/seam/components/startup/SecurityStartup.java b/gr.sch.ira.minoas/src/gr/sch/ira/minoas/seam/components/startup/SecurityStartup.java
index 2600b033..7a3fd5ff 100644
--- a/gr.sch.ira.minoas/src/gr/sch/ira/minoas/seam/components/startup/SecurityStartup.java
+++ b/gr.sch.ira.minoas/src/gr/sch/ira/minoas/seam/components/startup/SecurityStartup.java
@@ -1,140 +1,141 @@
package gr.sch.ira.minoas.seam.components.startup;
import gr.sch.ira.minoas.model.security.Principal;
import gr.sch.ira.minoas.model.security.Role;
import gr.sch.ira.minoas.seam.components.BaseDatabaseAwareSeamComponent;
import javax.persistence.NoResultException;
import org.jboss.seam.ScopeType;
import org.jboss.seam.annotations.Create;
import org.jboss.seam.annotations.Name;
import org.jboss.seam.annotations.Scope;
import org.jboss.seam.annotations.Startup;
import org.jboss.seam.annotations.TransactionPropagationType;
import org.jboss.seam.annotations.Transactional;
/**
* @author <a href="mailto:[email protected]">Filippos Slavik</a>
* @version $Id$
*/
@Startup()
@Scope(ScopeType.APPLICATION)
@Name("SecurityStartup")
public class SecurityStartup extends BaseDatabaseAwareSeamComponent {
/**
* Comment for <code>serialVersionUID</code>
*/
private static final long serialVersionUID = 1L;
protected Principal getPrincipal(String principalName) {
try {
return (Principal) getEntityManager().createQuery("SELECT p FROM Principal p WHERE p.username = :username")
.setParameter("username", principalName).getSingleResult();
} catch (NoResultException nre) {
return null;
}
}
protected Role getRole(String roleName) {
try {
return (Role) getEntityManager().createQuery("SELECT r FROM Role r WHERE r.name = :roleName").setParameter(
"roleName", roleName).getSingleResult();
} catch (NoResultException nre) {
return null;
}
}
protected Principal createPrincipal(String username, String password, String realname) {
Principal principal = getPrincipal(username);
if (principal == null) {
principal = new Principal(username, password, realname);
getEntityManager().persist(principal);
} else {
info("principal \"#0\" already exists in the database", username);
}
return principal;
}
protected Role createRole(String name, String desc, Principal creator) {
Role role = getRole(name);
if (role == null) {
role = new Role(name, desc);
role.setInsertedBy(creator);
getEntityManager().persist(role);
info("successfully created role \"#0\" with description \"#1\" in database.", role.getName(), role
.getDescription());
} else {
info("role \"#0\" already exists in the database.", name);
}
return role;
}
@Create
@Transactional(TransactionPropagationType.REQUIRED)
public void init() {
info("initializing security model");
Principal admin = createPrincipal("admin", "admin", "Ο Θέος");
Role adminRole = createRole("ADMIN", "Admin Role - The god him self", admin);
admin.addRole(adminRole);
/* Employee */
createRole("ADD_EMPLOYEE", "Add New Employee", admin);
createRole("ADD_EMPLOYEE_HOURLY_BASED", "Add New Hourly Based Employee", admin);
createRole("ADD_EMPLOYEE_DEPUTY", "Add New Deputy Employee", admin);
createRole("ADD_EMPLOYEE_REGULAR", "Add New Regular Employee", admin);
createRole("MANAGE_EMPLOYEE", "Manage Employee", admin);
createRole("MANAGE_EMPLOYMENT", "Manage Employment", admin);
createRole("MANAGE_EMPLOYMENT_HOURLY_BASED", "Manage Hourly Based Employment", admin);
createRole("MANAGE_EMPLOYMENT_DEPUTY", "Manage Deputy Employment", admin);
createRole("MANAGE_EMPLOYMENT_REGULAR", "Manage Regular Employment", admin);
createRole("DELETE_EMPLOYMENT_REGULAR", "Delete Regular Employment", admin);
createRole("DELETE_EMPLOYMENT_DEUPTY", "Delete Deputy Employment", admin);
createRole("VIEW_EMPLOYEE", "View Employee", admin);
createRole("VIEW_PREPARATORY", "View Preparatory Data", admin);
createRole("MANAGE_PREPARATORY_OWNER", "Manage Preperatory Owner", admin);
createRole("MANAGE_PREPARATORY_EST_LICENSE", "Manage Preparatory Establishment License", admin);
createRole("MANAGE_SPECIALIZATION_GROUP", "Manage Specialization Groups", admin);
/* Secondments */
createRole("ADD_SECONDMENT", "Add New Secondments", admin);
createRole("MANAGE_SECONDMENT", "Manage Secondment", admin);
createRole("VIEW_SECONDMENT", "View Secondment", admin);
/* Service Allocation */
createRole("ADD_SERVICE_ALLOCATION", "Add New Service Allocation", admin);
createRole("MANAGE_SERVICE_ALLOCATION", "Manage Service Allocation", admin);
createRole("VIEW_SERVICE_ALLOCATION", "View Service Allocation", admin);
/* Leave */
createRole("ADD_LEAVE", "Add New Leave", admin);
createRole("MANAGE_LEAVE", "Manage Leave", admin);
createRole("VIEW_LEAVE", "View Leave", admin);
createRole("PRINT_LEAVE", "Print Leave", admin);
+ createRole("DELETE_LEAVE", "Delete Leave", admin);
/* Disposal */
createRole("ADD_DISPOSAL", "Add New Disposal", admin);
createRole("MANAGE_DISPOSAL", "Manage Disposal", admin);
createRole("VIEW_DISPOSAL", "View Disposal", admin);
/* Reports */
createRole("VIEW_SCHOOL_REPORT", "View School Oriented Reports", admin);
/* School Unit */
createRole("MANAGE_SCHOOL_BASIC", "Basic School Unit Management", admin);
/* Improvements */
createRole("MANAGE_IMPROVEMENTS", "Manage Improvements", admin);
createRole("VIEW_IMPROVEMENTS", "View Improvements", admin);
/* Permanent Transfers */
createRole("VIEW_PERMANENT_TRANSFERS", "View Permanent Transfers", admin);
createRole("MANAGE_PERMANENT_TRANSFERS", "Manage Permanent Transfers", admin);
}
}
| true | true | public void init() {
info("initializing security model");
Principal admin = createPrincipal("admin", "admin", "Ο Θέος");
Role adminRole = createRole("ADMIN", "Admin Role - The god him self", admin);
admin.addRole(adminRole);
/* Employee */
createRole("ADD_EMPLOYEE", "Add New Employee", admin);
createRole("ADD_EMPLOYEE_HOURLY_BASED", "Add New Hourly Based Employee", admin);
createRole("ADD_EMPLOYEE_DEPUTY", "Add New Deputy Employee", admin);
createRole("ADD_EMPLOYEE_REGULAR", "Add New Regular Employee", admin);
createRole("MANAGE_EMPLOYEE", "Manage Employee", admin);
createRole("MANAGE_EMPLOYMENT", "Manage Employment", admin);
createRole("MANAGE_EMPLOYMENT_HOURLY_BASED", "Manage Hourly Based Employment", admin);
createRole("MANAGE_EMPLOYMENT_DEPUTY", "Manage Deputy Employment", admin);
createRole("MANAGE_EMPLOYMENT_REGULAR", "Manage Regular Employment", admin);
createRole("DELETE_EMPLOYMENT_REGULAR", "Delete Regular Employment", admin);
createRole("DELETE_EMPLOYMENT_DEUPTY", "Delete Deputy Employment", admin);
createRole("VIEW_EMPLOYEE", "View Employee", admin);
createRole("VIEW_PREPARATORY", "View Preparatory Data", admin);
createRole("MANAGE_PREPARATORY_OWNER", "Manage Preperatory Owner", admin);
createRole("MANAGE_PREPARATORY_EST_LICENSE", "Manage Preparatory Establishment License", admin);
createRole("MANAGE_SPECIALIZATION_GROUP", "Manage Specialization Groups", admin);
/* Secondments */
createRole("ADD_SECONDMENT", "Add New Secondments", admin);
createRole("MANAGE_SECONDMENT", "Manage Secondment", admin);
createRole("VIEW_SECONDMENT", "View Secondment", admin);
/* Service Allocation */
createRole("ADD_SERVICE_ALLOCATION", "Add New Service Allocation", admin);
createRole("MANAGE_SERVICE_ALLOCATION", "Manage Service Allocation", admin);
createRole("VIEW_SERVICE_ALLOCATION", "View Service Allocation", admin);
/* Leave */
createRole("ADD_LEAVE", "Add New Leave", admin);
createRole("MANAGE_LEAVE", "Manage Leave", admin);
createRole("VIEW_LEAVE", "View Leave", admin);
createRole("PRINT_LEAVE", "Print Leave", admin);
/* Disposal */
createRole("ADD_DISPOSAL", "Add New Disposal", admin);
createRole("MANAGE_DISPOSAL", "Manage Disposal", admin);
createRole("VIEW_DISPOSAL", "View Disposal", admin);
/* Reports */
createRole("VIEW_SCHOOL_REPORT", "View School Oriented Reports", admin);
/* School Unit */
createRole("MANAGE_SCHOOL_BASIC", "Basic School Unit Management", admin);
/* Improvements */
createRole("MANAGE_IMPROVEMENTS", "Manage Improvements", admin);
createRole("VIEW_IMPROVEMENTS", "View Improvements", admin);
/* Permanent Transfers */
createRole("VIEW_PERMANENT_TRANSFERS", "View Permanent Transfers", admin);
createRole("MANAGE_PERMANENT_TRANSFERS", "Manage Permanent Transfers", admin);
}
| public void init() {
info("initializing security model");
Principal admin = createPrincipal("admin", "admin", "Ο Θέος");
Role adminRole = createRole("ADMIN", "Admin Role - The god him self", admin);
admin.addRole(adminRole);
/* Employee */
createRole("ADD_EMPLOYEE", "Add New Employee", admin);
createRole("ADD_EMPLOYEE_HOURLY_BASED", "Add New Hourly Based Employee", admin);
createRole("ADD_EMPLOYEE_DEPUTY", "Add New Deputy Employee", admin);
createRole("ADD_EMPLOYEE_REGULAR", "Add New Regular Employee", admin);
createRole("MANAGE_EMPLOYEE", "Manage Employee", admin);
createRole("MANAGE_EMPLOYMENT", "Manage Employment", admin);
createRole("MANAGE_EMPLOYMENT_HOURLY_BASED", "Manage Hourly Based Employment", admin);
createRole("MANAGE_EMPLOYMENT_DEPUTY", "Manage Deputy Employment", admin);
createRole("MANAGE_EMPLOYMENT_REGULAR", "Manage Regular Employment", admin);
createRole("DELETE_EMPLOYMENT_REGULAR", "Delete Regular Employment", admin);
createRole("DELETE_EMPLOYMENT_DEUPTY", "Delete Deputy Employment", admin);
createRole("VIEW_EMPLOYEE", "View Employee", admin);
createRole("VIEW_PREPARATORY", "View Preparatory Data", admin);
createRole("MANAGE_PREPARATORY_OWNER", "Manage Preperatory Owner", admin);
createRole("MANAGE_PREPARATORY_EST_LICENSE", "Manage Preparatory Establishment License", admin);
createRole("MANAGE_SPECIALIZATION_GROUP", "Manage Specialization Groups", admin);
/* Secondments */
createRole("ADD_SECONDMENT", "Add New Secondments", admin);
createRole("MANAGE_SECONDMENT", "Manage Secondment", admin);
createRole("VIEW_SECONDMENT", "View Secondment", admin);
/* Service Allocation */
createRole("ADD_SERVICE_ALLOCATION", "Add New Service Allocation", admin);
createRole("MANAGE_SERVICE_ALLOCATION", "Manage Service Allocation", admin);
createRole("VIEW_SERVICE_ALLOCATION", "View Service Allocation", admin);
/* Leave */
createRole("ADD_LEAVE", "Add New Leave", admin);
createRole("MANAGE_LEAVE", "Manage Leave", admin);
createRole("VIEW_LEAVE", "View Leave", admin);
createRole("PRINT_LEAVE", "Print Leave", admin);
createRole("DELETE_LEAVE", "Delete Leave", admin);
/* Disposal */
createRole("ADD_DISPOSAL", "Add New Disposal", admin);
createRole("MANAGE_DISPOSAL", "Manage Disposal", admin);
createRole("VIEW_DISPOSAL", "View Disposal", admin);
/* Reports */
createRole("VIEW_SCHOOL_REPORT", "View School Oriented Reports", admin);
/* School Unit */
createRole("MANAGE_SCHOOL_BASIC", "Basic School Unit Management", admin);
/* Improvements */
createRole("MANAGE_IMPROVEMENTS", "Manage Improvements", admin);
createRole("VIEW_IMPROVEMENTS", "View Improvements", admin);
/* Permanent Transfers */
createRole("VIEW_PERMANENT_TRANSFERS", "View Permanent Transfers", admin);
createRole("MANAGE_PERMANENT_TRANSFERS", "Manage Permanent Transfers", admin);
}
|
diff --git a/plugins/org.eclipse.acceleo.engine/src/org/eclipse/acceleo/engine/service/AcceleoService.java b/plugins/org.eclipse.acceleo.engine/src/org/eclipse/acceleo/engine/service/AcceleoService.java
index ac9e673d..a591696c 100644
--- a/plugins/org.eclipse.acceleo.engine/src/org/eclipse/acceleo/engine/service/AcceleoService.java
+++ b/plugins/org.eclipse.acceleo.engine/src/org/eclipse/acceleo/engine/service/AcceleoService.java
@@ -1,881 +1,881 @@
/*******************************************************************************
* Copyright (c) 2008, 2011 Obeo.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* Obeo - initial API and implementation
*******************************************************************************/
package org.eclipse.acceleo.engine.service;
import java.io.File;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.MissingResourceException;
import java.util.Set;
import org.eclipse.acceleo.common.preference.AcceleoPreferences;
import org.eclipse.acceleo.common.utils.CompactHashSet;
import org.eclipse.acceleo.engine.AcceleoEngineMessages;
import org.eclipse.acceleo.engine.AcceleoEnginePlugin;
import org.eclipse.acceleo.engine.AcceleoEvaluationException;
import org.eclipse.acceleo.engine.event.AbstractAcceleoTextGenerationListener;
import org.eclipse.acceleo.engine.event.IAcceleoTextGenerationListener;
import org.eclipse.acceleo.engine.generation.AcceleoEngine;
import org.eclipse.acceleo.engine.generation.IAcceleoEngine;
import org.eclipse.acceleo.engine.generation.strategy.DefaultStrategy;
import org.eclipse.acceleo.engine.generation.strategy.IAcceleoGenerationStrategy;
import org.eclipse.acceleo.engine.generation.strategy.PreviewStrategy;
import org.eclipse.acceleo.engine.internal.utils.AcceleoEngineRegistry;
import org.eclipse.acceleo.engine.internal.utils.AcceleoListenerDescriptor;
import org.eclipse.acceleo.engine.internal.utils.AcceleoTraceabilityRegistryListenerUils;
import org.eclipse.acceleo.engine.internal.utils.DefaultEngineSelector;
import org.eclipse.acceleo.model.mtl.Module;
import org.eclipse.acceleo.model.mtl.ModuleElement;
import org.eclipse.acceleo.model.mtl.Template;
import org.eclipse.acceleo.model.mtl.VisibilityKind;
import org.eclipse.core.resources.IFile;
import org.eclipse.core.resources.IProject;
import org.eclipse.core.resources.ResourcesPlugin;
import org.eclipse.core.runtime.CoreException;
import org.eclipse.core.runtime.IPath;
import org.eclipse.core.runtime.Path;
import org.eclipse.emf.common.EMFPlugin;
import org.eclipse.emf.common.util.Monitor;
import org.eclipse.emf.common.util.TreeIterator;
import org.eclipse.emf.ecore.EClassifier;
import org.eclipse.emf.ecore.EObject;
import org.eclipse.emf.ecore.util.EcoreUtil;
/**
* This class provides utility methods to launch the generation of an Acceleo template.
*
* @author <a href="mailto:[email protected]">Laurent Goubet</a>
*/
public final class AcceleoService {
/** This list will hold listeners that are to be used for any and all generations. */
private static final List<IAcceleoTextGenerationListener> STATIC_LISTENERS = new ArrayList<IAcceleoTextGenerationListener>();
/** This message will be set for all NPE thrown because of null arguments for this utility's methods. */
private static final String TEMPLATE_CALL_NPE = AcceleoEngineMessages
.getString("AcceleoService.NullArguments"); //$NON-NLS-1$
/** The engine we'll use for all generations through this service instance. */
private IAcceleoEngine generationEngine;
/** The current generation strategy. */
private final IAcceleoGenerationStrategy strategy;
/**
* The listeners added to the initial engine.
*
* @since 3.1
**/
private final List<IAcceleoTextGenerationListener> addedListeners = new ArrayList<IAcceleoTextGenerationListener>();
/**
* The properties files added to the initial engine.
*
* @since 3.1
**/
private final List<String> addedPropertiesfiles = new ArrayList<String>();
/**
* The properties added to the initial engine.
*
* @since 3.1
**/
private final List<Map<String, String>> addedProperties = new ArrayList<Map<String, String>>();
/**
* The generation ID.
*/
private String generationID;
/**
* Indicates if we should deactivate the traceability.
*/
private boolean deactivateTraceability;
/**
* Instantiates an instance of the service with a default generation strategy.
*
* @since 3.0
*/
public AcceleoService() {
this(null);
}
/**
* Instantiates an instance of the service given the generation strategy that is to be used for this
* generation.
*
* @param generationStrategy
* Generation strategy that'll be used for this generation.
* @since 3.0
*/
public AcceleoService(IAcceleoGenerationStrategy generationStrategy) {
if (generationStrategy == null) {
strategy = new DefaultStrategy();
} else {
strategy = generationStrategy;
}
createEngine();
}
/**
* Registers a listener to be notified for any text generation. This will have to be removed manually
* through {@link #removeListener(IAcceleoTextGenerationListener)} if the listeners mustn't be used for a
* given generation.
*
* @param listener
* The new listener that is to be registered for notification.
* @since 3.0
*/
public static void addStaticListener(IAcceleoTextGenerationListener listener) {
STATIC_LISTENERS.add(listener);
}
/**
* Removes a static listener from the notification loops.
*
* @param listener
* The listener that is to be removed from the notification loops.
* @since 3.0
*/
public static void removeStaticListener(IAcceleoTextGenerationListener listener) {
STATIC_LISTENERS.remove(listener);
}
/**
* Registers a listener to be notified for any text generation that will take place in this engine
* evaluation process.
*
* @param listener
* The new listener that is to be registered for notification.
* @since 0.8
*/
public void addListener(IAcceleoTextGenerationListener listener) {
generationEngine.addListener(listener);
this.addedListeners.add(listener);
}
/**
* This will add custom key/value pairs to the generation context so that they can be accessed through the
* getProperty() services at generation time.
* <p>
* <b>Note</b> that such properties always take precedence over properties defined in a properties file.
* </p>
*
* @param customProperties
* key/value pairs that are to be added to the generation context.
* @since 3.0
*/
public void addProperties(Map<String, String> customProperties) {
generationEngine.addProperties(customProperties);
this.addedProperties.add(customProperties);
}
/**
* Adds the given properties file to the generation context so that its key/value pairs can be accessed
* through the getProperty() services at generation time.
* <p>
* <b>Note</b> that the first properties file added to this list will take precedence over subsequent
* ones.
* </p>
* <p>
* The given path can be either absolute or relative. If it represent an URI of platform scheme, we'll
* resolve this path against the current workspace.
* </p>
* <p>
* For example, if plugin A adds "a.properties" which contains a key "a.b.c" and calls a launcher
* contained by a second plugin B which itself contains "b.properties" containing key "a.b.c" :
*
* <pre>
* getProperty('a.b.c')
* </pre>
*
* will result in the value from a.properties being printed, whereas
*
* <pre>
* getProperty('b.properties', 'a.b.c')
* </pre>
*
* will return the value from b.properties.
* </p>
* <p>
* Take note that properties added through {@link #addProperties(Map)} will always take precedence over
* properties defined in a file.
* </p>
*
* @param propertiesFile
* Qualified path to the properties file that is to be added to the generation context.
* @throws MissingResourceException
* This will be thrown if we cannot locate the properties file in the current classpath.
* @since 3.0
*/
public void addPropertiesFile(String propertiesFile) throws MissingResourceException {
generationEngine.addProperties(propertiesFile);
this.addedPropertiesfiles.add(propertiesFile);
}
/**
* Properly disposes of everything that could have been loaded from this service.
*
* @deprecated This has no real use.
* @since 3.0
*/
@Deprecated
public void dispose() {
// empty implementation
}
/**
* This can be used to launch the generation of multiple Acceleo templates given their names and their
* containing modules.
* <p>
* Keep in mind that this can only be used with single-argument templates. Any attempt to call to a
* template with more than one argument through this method will throw {@link AcceleoEvaluationException}
* s.
* </p>
* <p>
* The input model will be iterated over for objects matching the templates' parameter types.
* </p>
* <p>
* <tt>generationRoot</tt> will be used as the root of all generated files. For example, a template such
* as
*
* <pre>
* [template generate(c:EClass)]
* [file(log.log, true)]processing class [c.name/][/file]
* [/template]
* </pre>
*
* evaluated with <tt>file:\\c:\</tt> as <tt>generationRoot</tt> would create the file <tt>c:\log.log</tt>
* and generate a line "processing class <className>" for each class of the input model.
* </p>
*
* @param templates
* This map will be used to locate templates of the given names in the associated module.
* @param model
* Input model for this generation.
* @param generationRoot
* This will be used as the root for the generated files. Cannot be <code>null</code> except if
* <code>preview</code> is <code>true</code> in which case no files will be generated.
* @param monitor
* This will be used as the progress monitor for the generation. Can be <code>null</code>.
* @return if <code>preview</code> is set to <code>true</code>, no files will be generated. Instead, a Map
* mapping all file paths to the potential content will be returned. This returned map will be
* empty otherwise.
* @since 3.0
*/
public Map<String, String> doGenerate(Map<Module, Set<String>> templates, EObject model,
File generationRoot, Monitor monitor) {
if (templates == null || model == null
|| (!(strategy instanceof PreviewStrategy) && generationRoot == null)) {
throw new NullPointerException(TEMPLATE_CALL_NPE);
}
Map<EClassifier, Set<Template>> templateTypes = new HashMap<EClassifier, Set<Template>>();
for (Map.Entry<Module, Set<String>> entry : templates.entrySet()) {
for (String templateName : entry.getValue()) {
Template template = findTemplate(entry.getKey(), templateName, 1);
EClassifier templateType = template.getParameter().get(0).getType();
if (templateTypes.containsKey(templateType)) {
templateTypes.get(templateType).add(template);
} else {
Set<Template> temp = new CompactHashSet<Template>();
temp.add(template);
templateTypes.put(templateType, temp);
}
}
}
final Map<String, String> previewResult = new HashMap<String, String>();
// Calls all templates with each of their potential arguments
final List<Object> arguments = new ArrayList<Object>();
// The input model itself is a potential argument
arguments.add(model);
for (Map.Entry<EClassifier, Set<Template>> entry : templateTypes.entrySet()) {
if (entry.getKey().isInstance(model)) {
for (Template template : entry.getValue()) {
previewResult.putAll(doGenerateTemplate(template, arguments, generationRoot, monitor));
}
}
}
final TreeIterator<EObject> targetElements = model.eAllContents();
while (targetElements.hasNext()) {
final EObject potentialTarget = targetElements.next();
for (Map.Entry<EClassifier, Set<Template>> entry : templateTypes.entrySet()) {
if (entry.getKey().isInstance(potentialTarget)) {
arguments.clear();
arguments.add(potentialTarget);
for (Template template : entry.getValue()) {
previewResult
.putAll(doGenerateTemplate(template, arguments, generationRoot, monitor));
}
}
}
}
return previewResult;
}
/**
* Launches the generation of an Acceleo template given its name and containing module.
* <p>
* This is a convenience method that can only be used with single argument templates. The input model will
* be iterated over for objects matching the template's parameter type.
* </p>
* <p>
* <tt>generationRoot</tt> will be used as the root of all generated files. For example, a template such
* as
*
* <pre>
* [template generate(c:EClass)]
* [file(log.log, true)]processing class [c.name/][/file]
* [/template]
* </pre>
*
* evaluated with <tt>file:\\c:\</tt> as <tt>generationRoot</tt> would create the file <tt>c:\log.log</tt>
* and generate a line "processing class <className>" for each class of the input model.
* </p>
*
* @param module
* The module in which we seek a template <tt>templateName</tt>.
* @param templateName
* Name of the template that is to be generated.
* @param model
* Input model for this Acceleo template.
* @param generationRoot
* This will be used as the root for the generated files. This can be <code>null</code>, in
* which case the user home directory will be used as root.
* @param blockTraceability
* This will indicate if we should deactivate the traceability (even if a listener register
* thanks to the extension point requires it).
* @param monitor
* This will be used as the progress monitor for the generation. Can be <code>null</code>.
* @return if <code>preview</code> is set to <code>true</code>, no files will be generated. Instead, a Map
* mapping all file paths to the potential content will be returned. This returned map will be
* empty otherwise.
* @since 3.1
*/
public Map<String, String> doGenerate(Module module, String templateName, EObject model,
File generationRoot, boolean blockTraceability, Monitor monitor) {
this.deactivateTraceability = blockTraceability;
return doGenerate(findTemplate(module, templateName, 1), model, generationRoot, monitor);
}
/**
* Launches the generation of an Acceleo template given its name and containing module.
* <p>
* This is a convenience method that can only be used with single argument templates. The input model will
* be iterated over for objects matching the template's parameter type.
* </p>
* <p>
* <tt>generationRoot</tt> will be used as the root of all generated files. For example, a template such
* as
*
* <pre>
* [template generate(c:EClass)]
* [file(log.log, true)]processing class [c.name/][/file]
* [/template]
* </pre>
*
* evaluated with <tt>file:\\c:\</tt> as <tt>generationRoot</tt> would create the file <tt>c:\log.log</tt>
* and generate a line "processing class <className>" for each class of the input model.
* </p>
*
* @param module
* The module in which we seek a template <tt>templateName</tt>.
* @param templateName
* Name of the template that is to be generated.
* @param model
* Input model for this Acceleo template.
* @param generationRoot
* This will be used as the root for the generated files. This can be <code>null</code>, in
* which case the user home directory will be used as root.
* @param monitor
* This will be used as the progress monitor for the generation. Can be <code>null</code>.
* @return if <code>preview</code> is set to <code>true</code>, no files will be generated. Instead, a Map
* mapping all file paths to the potential content will be returned. This returned map will be
* empty otherwise.
* @since 3.0
*/
public Map<String, String> doGenerate(Module module, String templateName, EObject model,
File generationRoot, Monitor monitor) {
return doGenerate(findTemplate(module, templateName, 1), model, generationRoot, monitor);
}
/**
* Launches the generation of an Acceleo template given its name and containing module.
* <p>
* This is a convenience method that can be used with multiple argument templates. The input model will be
* iterated over for objects matching the template's <b>first</b> parameter type. The template will then
* be called with these objects as first arguments, and the given list of <code>arguments</code> for the
* remaining template parameters.
* </p>
* <p>
* <tt>generationRoot</tt> will be used as the root of all generated files. For example, a template such
* as
*
* <pre>
* [template generate(c:EClass)]
* [file(log.log, true)]processing class [c.name/][/file]
* [/template]
* </pre>
*
* evaluated with <tt>file:\\c:\</tt> as <tt>generationRoot</tt> would create the file <tt>c:\log.log</tt>
* and generate a line "processing class <className>" for each class of the input model.
* </p>
*
* @param module
* The module in which we seek a template <tt>templateName</tt>.
* @param templateName
* Name of the template that is to be generated.
* @param model
* Input model for this Acceleo template.
* @param arguments
* Arguments of the template call, excluding the very first one (<code>model</code> object).
* @param generationRoot
* This will be used as the root for the generated files. This can be <code>null</code>, in
* which case the user home directory will be used as root.
* @param monitor
* This will be used as the progress monitor for the generation. Can be <code>null</code>.
* @return if <code>preview</code> is set to <code>true</code>, no files will be generated. Instead, a Map
* mapping all file paths to the potential content will be returned. This returned map will be
* empty otherwise.
*/
public Map<String, String> doGenerate(Module module, String templateName, EObject model,
List<? extends Object> arguments, File generationRoot, Monitor monitor) {
if (model == null || arguments == null
|| (!(strategy instanceof PreviewStrategy) && generationRoot == null)) {
throw new NullPointerException(TEMPLATE_CALL_NPE);
}
final Template template = findTemplate(module, templateName, arguments.size() + 1);
// #findTemplate never returns private templates.
final Map<String, String> previewResult = new HashMap<String, String>();
// Calls the template with each potential arguments
final EClassifier argumentType = template.getParameter().get(0).getType();
if (argumentType.eIsProxy()) {
throw new AcceleoEvaluationException(AcceleoEngineMessages.getString(
"AcceleoService.TypeIsProxy", templateName)); //$NON-NLS-1$
}
boolean generatedHasOccurred = false;
// The input model itself is a potential argument
if (argumentType.isInstance(model)) {
final List<Object> actualArguments = new ArrayList<Object>();
actualArguments.add(model);
actualArguments.addAll(arguments);
previewResult.putAll(doGenerateTemplate(template, actualArguments, generationRoot, monitor));
generatedHasOccurred = true;
}
final TreeIterator<EObject> targetElements = model.eAllContents();
while (targetElements.hasNext()) {
final EObject potentialTarget = targetElements.next();
if (argumentType.isInstance(potentialTarget)) {
final List<Object> actualArguments = new ArrayList<Object>();
actualArguments.add(potentialTarget);
actualArguments.addAll(arguments);
previewResult.putAll(doGenerateTemplate(template, actualArguments, generationRoot, monitor));
generatedHasOccurred = true;
}
}
if (!generatedHasOccurred) {
AcceleoEnginePlugin.log(AcceleoEngineMessages.getString("AcceleoService.NoGenerationHasOccurred", //$NON-NLS-1$
templateName, EcoreUtil.getURI(argumentType)), false);
}
return previewResult;
}
/**
* Launches the generation of a single-argument Acceleo template for all matching EObjects in the given
* model.
* <p>
* This is a convenience method that can only be used with single argument templates. Any attempt at
* calling other templates through this method will throw {@link AcceleoEvaluationException}s. The input
* model will be iterated over for objects matching the template's parameter type.
* </p>
* <p>
* <tt>generationRoot</tt> will be used as the root of all generated files. For example, a template such
* as
*
* <pre>
* [template generate(c:EClass)]
* [file(log.log, true)]processing class [c.name/][/file]
* [/template]
* </pre>
*
* evaluated with <tt>file:\\c:\</tt> as <tt>generationRoot</tt> would create the file <tt>c:\log.log</tt>
* and generate a line "processing class <className>" for each class of the input model.
* </p>
*
* @param template
* The template that is to be generated
* @param model
* Input model for this Acceleo template.
* @param generationRoot
* This will be used as the root for the generated files. This can be <code>null</code>, in
* which case the user home directory will be used as root.
* @param monitor
* This will be used as the progress monitor for the generation. Can be <code>null</code>.
* @return if <code>preview</code> is set to <code>true</code>, no files will be generated. Instead, a Map
* mapping all file paths to the potential content will be returned. This returned map will be
* empty otherwise.
* @since 3.0
*/
public Map<String, String> doGenerate(Template template, EObject model, File generationRoot,
Monitor monitor) {
if (template == null || model == null
|| (!(strategy instanceof PreviewStrategy) && generationRoot == null)) {
throw new NullPointerException(TEMPLATE_CALL_NPE);
}
if (template.getVisibility() != VisibilityKind.PUBLIC) {
throw new AcceleoEvaluationException(AcceleoEngineMessages
.getString("AcceleoEngine.IllegalTemplateInvocation")); //$NON-NLS-1$
}
if (template.getParameter().size() != 1) {
throw new AcceleoEvaluationException(AcceleoEngineMessages
.getString("AcceleoEngine.VoidArguments")); //$NON-NLS-1$
}
final Map<String, String> previewResult = new HashMap<String, String>();
// Calls the template with each potential arguments
final EClassifier argumentType = template.getParameter().get(0).getType();
final List<Object> arguments = new ArrayList<Object>();
boolean generatedHasOccurred = false;
// The input model itself is a potential argument
if (argumentType.isInstance(model)) {
arguments.add(model);
previewResult.putAll(doGenerateTemplate(template, arguments, generationRoot, monitor));
generatedHasOccurred = true;
}
final TreeIterator<EObject> targetElements = model.eAllContents();
while (targetElements.hasNext()) {
final EObject potentialTarget = targetElements.next();
if (argumentType.isInstance(potentialTarget)) {
arguments.clear();
arguments.add(potentialTarget);
previewResult.putAll(doGenerateTemplate(template, arguments, generationRoot, monitor));
generatedHasOccurred = true;
}
}
if (!generatedHasOccurred) {
AcceleoEnginePlugin.log(AcceleoEngineMessages.getString("AcceleoService.NoGenerationHasOccurred", //$NON-NLS-1$
template.getName(), EcoreUtil.getURI(argumentType)), false);
}
return previewResult;
}
/**
* Launches the generation of an Acceleo template with the given arguments.
* <p>
* <tt>generationRoot</tt> will be used as the root of all generated files. For example, a template such
* as
*
* <pre>
* [template generate(c:EClass)]
* [file(log.log, true)]processing class [c.name/][/file]
* [/template]
* </pre>
*
* evaluated with <tt>file:\\c:\</tt> as <tt>generationRoot</tt> would create the file <tt>c:\log.log</tt>
* and generate a line "processing class <className>" for each class of the input model.
* </p>
*
* @param module
* The module in which we seek a template <tt>templateName</tt>.
* @param templateName
* Name of the template that is to be generated.
* @param arguments
* Arguments that must be passed on to the template for evaluation.
* @param generationRoot
* This will be used as the root for the generated files. This can be <code>null</code>, in
* which case the user home directory will be used as root.
* @param monitor
* This will be used as the progress monitor for the generation. Can be <code>null</code>.
* @return if <code>preview</code> is set to <code>true</code>, no files will be generated. Instead, a Map
* mapping all file paths to the potential content will be returned. This returned map will be
* empty otherwise.
*/
public Map<String, String> doGenerateTemplate(Module module, String templateName,
List<? extends Object> arguments, File generationRoot, Monitor monitor) {
return doGenerateTemplate(findTemplate(module, templateName, arguments), arguments, generationRoot,
monitor);
}
/**
* Launches the generation of an Acceleo template with the given arguments.
* <p>
* <tt>generationRoot</tt> will be used as the root of all generated files. For example, a template such
* as
*
* <pre>
* [template generate(c:EClass)]
* [file(log.log, true)]processing class [c.name/][/file]
* [/template]
* </pre>
*
* evaluated with <tt>file:\\c:\</tt> as <tt>generationRoot</tt> would create the file <tt>c:\log.log</tt>
* and generate a line <tt>"processing class <className>"</tt> for each class of the input
* model.
* </p>
*
* @param template
* The template that is to be generated
* @param arguments
* Arguments that must be passed on to the template for evaluation.
* @param generationRoot
* This will be used as the root for the generated files. This can be <code>null</code>, in
* which case the user home directory will be used as root.
* @param blockTraceability
* This will indicate if we should deactivate the traceability (even if listener register
* thanks to the extension point requires it).
* @param monitor
* This will be used as the progress monitor for the generation. Can be <code>null</code>.
* @return if <code>preview</code> is set to <code>true</code>, no files will be generated. Instead, a Map
* mapping all file paths to the potential content will be returned. This returned map will be
* empty otherwise.
* @since 3.1
*/
public Map<String, String> doGenerateTemplate(Template template, List<? extends Object> arguments,
File generationRoot, boolean blockTraceability, Monitor monitor) {
this.deactivateTraceability = blockTraceability;
return doGenerateTemplate(template, arguments, generationRoot, monitor);
}
/**
* Launches the generation of an Acceleo template with the given arguments.
* <p>
* <tt>generationRoot</tt> will be used as the root of all generated files. For example, a template such
* as
*
* <pre>
* [template generate(c:EClass)]
* [file(log.log, true)]processing class [c.name/][/file]
* [/template]
* </pre>
*
* evaluated with <tt>file:\\c:\</tt> as <tt>generationRoot</tt> would create the file <tt>c:\log.log</tt>
* and generate a line <tt>"processing class <className>"</tt> for each class of the input
* model.
* </p>
*
* @param template
* The template that is to be generated
* @param arguments
* Arguments that must be passed on to the template for evaluation.
* @param generationRoot
* This will be used as the root for the generated files. This can be <code>null</code>, in
* which case the user home directory will be used as root.
* @param monitor
* This will be used as the progress monitor for the generation. Can be <code>null</code>.
* @return if <code>preview</code> is set to <code>true</code>, no files will be generated. Instead, a Map
* mapping all file paths to the potential content will be returned. This returned map will be
* empty otherwise.
* @since 3.0
*/
public Map<String, String> doGenerateTemplate(Template template, List<? extends Object> arguments,
File generationRoot, Monitor monitor) {
for (IAcceleoTextGenerationListener listener : STATIC_LISTENERS) {
generationEngine.addListener(listener);
}
List<AcceleoListenerDescriptor> descriptorsUsed = new ArrayList<AcceleoListenerDescriptor>();
List<AcceleoListenerDescriptor> descriptors = AcceleoTraceabilityRegistryListenerUils
.getListenerDescriptors();
for (AcceleoListenerDescriptor acceleoListenerDescriptor : descriptors) {
if (acceleoListenerDescriptor.getNature() == null) {
// If we are in stand alone, only use the descriptors without nature
descriptorsUsed.add(acceleoListenerDescriptor);
- } else if (EMFPlugin.IS_ECLIPSE_RUNNING) {
+ } else if (EMFPlugin.IS_ECLIPSE_RUNNING && generationRoot != null) {
// Check the nature of the output project
IPath location = new Path(generationRoot.getAbsolutePath());
IFile iFile = ResourcesPlugin.getWorkspace().getRoot().getFileForLocation(location);
if (iFile != null) {
IProject project = iFile.getProject();
try {
if (project != null && project.isAccessible()
&& project.hasNature(acceleoListenerDescriptor.getNature())) {
descriptorsUsed.add(acceleoListenerDescriptor);
}
} catch (CoreException e) {
AcceleoEnginePlugin.log(e, false);
}
}
}
}
boolean forceTraceability = false;
if (!deactivateTraceability) {
for (AcceleoListenerDescriptor acceleoListenerDescriptor : descriptorsUsed) {
IAcceleoTextGenerationListener listener = acceleoListenerDescriptor.getTraceabilityListener();
if (listener instanceof AbstractAcceleoTextGenerationListener) {
AbstractAcceleoTextGenerationListener textGenerationListener = (AbstractAcceleoTextGenerationListener)listener;
textGenerationListener.setGenerationID(generationID);
}
// If one of the listeners wants to force the traceability it will have it.
if (!AcceleoPreferences.isTraceabilityEnabled()
&& acceleoListenerDescriptor.isForceTraceability()) {
AcceleoPreferences.switchTraceability(true);
forceTraceability = true;
}
}
// We create the engine once again if someone has forced the traceability
if (forceTraceability) {
createEngine();
// We restore all the content of the previous engine
for (IAcceleoTextGenerationListener listener : this.addedListeners) {
generationEngine.addListener(listener);
}
for (Map<String, String> properties : this.addedProperties) {
generationEngine.addProperties(properties);
}
for (String propertiesFiles : this.addedPropertiesfiles) {
generationEngine.addProperties(propertiesFiles);
}
}
}
// We add the listeners contributed thanks to the extension point.
for (AcceleoListenerDescriptor acceleoListenerDescriptor : descriptorsUsed) {
generationEngine.addListener(acceleoListenerDescriptor.getTraceabilityListener());
}
try {
return generationEngine.evaluate(template, arguments, generationRoot, strategy, monitor);
} finally {
for (IAcceleoTextGenerationListener listener : STATIC_LISTENERS) {
generationEngine.removeListener(listener);
}
for (AcceleoListenerDescriptor acceleoListenerDescriptor : descriptorsUsed) {
generationEngine.removeListener(acceleoListenerDescriptor.getTraceabilityListener());
}
// Clear the cache
addedListeners.clear();
addedProperties.clear();
addedPropertiesfiles.clear();
if (forceTraceability) {
AcceleoPreferences.switchTraceability(false);
}
}
}
/**
* Removes a listener from the notification loops.
*
* @param listener
* The listener that is to be removed from this engine's notification loops.
* @since 0.8
*/
public void removeListener(IAcceleoTextGenerationListener listener) {
generationEngine.removeListener(listener);
}
/**
* Instantiates the engine that will be used by this service for the generation.
*/
private void createEngine() {
if (EMFPlugin.IS_ECLIPSE_RUNNING) {
generationEngine = new DefaultEngineSelector().selectEngine(AcceleoEngineRegistry
.getRegisteredCreators());
}
if (generationEngine == null) {
generationEngine = new AcceleoEngine();
}
}
/**
* This will iterate through the module's elements to find public templates named <tt>templateName</tt>
* with the given count of arguments and return the first found.
*
* @param module
* The module in which we seek a template <tt>templateName</tt>.
* @param templateName
* Name of the sought template.
* @param argumentCount
* Number of arguments of the sought template.
* @return The first public template of this name contained by <tt>module</tt>. Will fail in
* {@link AcceleoEvaluationException} if none can be found.
*/
private Template findTemplate(Module module, String templateName, int argumentCount) {
for (ModuleElement element : module.getOwnedModuleElement()) {
if (element instanceof Template) {
Template template = (Template)element;
if (template.getVisibility() == VisibilityKind.PUBLIC
&& templateName.equals(template.getName())
&& template.getParameter().size() == argumentCount) {
return template;
}
}
}
throw new AcceleoEvaluationException(AcceleoEngineMessages.getString(
"AcceleoService.UndefinedTemplate", templateName, module.getName())); //$NON-NLS-1$
}
/**
* This will iterate through the module's elements to find public templates which argument types
* correspond to the given list of argument values.
*
* @param module
* The module in which we seek the template.
* @param templateName
* Name of the sought template.
* @param arguments
* Values of the argument we wish to pass on to the template.
* @return The first public template of this name with matching arguments contained by <tt>module</tt>.
* Will fail in {@link AcceleoEvaluationException} if none can be found.
*/
private Template findTemplate(Module module, String templateName, List<? extends Object> arguments) {
for (ModuleElement element : module.getOwnedModuleElement()) {
if (element instanceof Template) {
Template template = (Template)element;
if (template.getVisibility() == VisibilityKind.PUBLIC
&& templateName.equals(template.getName())
&& template.getParameter().size() == arguments.size()) {
boolean parameterMatch = true;
for (int i = 0; i < template.getParameter().size(); i++) {
if (!template.getParameter().get(i).getType().isInstance(arguments.get(i))) {
parameterMatch = false;
}
}
if (parameterMatch) {
return template;
}
}
}
}
throw new AcceleoEvaluationException(AcceleoEngineMessages.getString(
"AcceleoService.UndefinedTemplate", templateName, module.getName())); //$NON-NLS-1$
}
/**
* Sets the generation ID.
*
* @param generationID
* The generation ID.
* @since 3.1
*/
public void setGenerationID(String generationID) {
this.generationID = generationID;
}
}
| true | true | public Map<String, String> doGenerateTemplate(Template template, List<? extends Object> arguments,
File generationRoot, Monitor monitor) {
for (IAcceleoTextGenerationListener listener : STATIC_LISTENERS) {
generationEngine.addListener(listener);
}
List<AcceleoListenerDescriptor> descriptorsUsed = new ArrayList<AcceleoListenerDescriptor>();
List<AcceleoListenerDescriptor> descriptors = AcceleoTraceabilityRegistryListenerUils
.getListenerDescriptors();
for (AcceleoListenerDescriptor acceleoListenerDescriptor : descriptors) {
if (acceleoListenerDescriptor.getNature() == null) {
// If we are in stand alone, only use the descriptors without nature
descriptorsUsed.add(acceleoListenerDescriptor);
} else if (EMFPlugin.IS_ECLIPSE_RUNNING) {
// Check the nature of the output project
IPath location = new Path(generationRoot.getAbsolutePath());
IFile iFile = ResourcesPlugin.getWorkspace().getRoot().getFileForLocation(location);
if (iFile != null) {
IProject project = iFile.getProject();
try {
if (project != null && project.isAccessible()
&& project.hasNature(acceleoListenerDescriptor.getNature())) {
descriptorsUsed.add(acceleoListenerDescriptor);
}
} catch (CoreException e) {
AcceleoEnginePlugin.log(e, false);
}
}
}
}
boolean forceTraceability = false;
if (!deactivateTraceability) {
for (AcceleoListenerDescriptor acceleoListenerDescriptor : descriptorsUsed) {
IAcceleoTextGenerationListener listener = acceleoListenerDescriptor.getTraceabilityListener();
if (listener instanceof AbstractAcceleoTextGenerationListener) {
AbstractAcceleoTextGenerationListener textGenerationListener = (AbstractAcceleoTextGenerationListener)listener;
textGenerationListener.setGenerationID(generationID);
}
// If one of the listeners wants to force the traceability it will have it.
if (!AcceleoPreferences.isTraceabilityEnabled()
&& acceleoListenerDescriptor.isForceTraceability()) {
AcceleoPreferences.switchTraceability(true);
forceTraceability = true;
}
}
// We create the engine once again if someone has forced the traceability
if (forceTraceability) {
createEngine();
// We restore all the content of the previous engine
for (IAcceleoTextGenerationListener listener : this.addedListeners) {
generationEngine.addListener(listener);
}
for (Map<String, String> properties : this.addedProperties) {
generationEngine.addProperties(properties);
}
for (String propertiesFiles : this.addedPropertiesfiles) {
generationEngine.addProperties(propertiesFiles);
}
}
}
// We add the listeners contributed thanks to the extension point.
for (AcceleoListenerDescriptor acceleoListenerDescriptor : descriptorsUsed) {
generationEngine.addListener(acceleoListenerDescriptor.getTraceabilityListener());
}
try {
return generationEngine.evaluate(template, arguments, generationRoot, strategy, monitor);
} finally {
for (IAcceleoTextGenerationListener listener : STATIC_LISTENERS) {
generationEngine.removeListener(listener);
}
for (AcceleoListenerDescriptor acceleoListenerDescriptor : descriptorsUsed) {
generationEngine.removeListener(acceleoListenerDescriptor.getTraceabilityListener());
}
// Clear the cache
addedListeners.clear();
addedProperties.clear();
addedPropertiesfiles.clear();
if (forceTraceability) {
AcceleoPreferences.switchTraceability(false);
}
}
}
| public Map<String, String> doGenerateTemplate(Template template, List<? extends Object> arguments,
File generationRoot, Monitor monitor) {
for (IAcceleoTextGenerationListener listener : STATIC_LISTENERS) {
generationEngine.addListener(listener);
}
List<AcceleoListenerDescriptor> descriptorsUsed = new ArrayList<AcceleoListenerDescriptor>();
List<AcceleoListenerDescriptor> descriptors = AcceleoTraceabilityRegistryListenerUils
.getListenerDescriptors();
for (AcceleoListenerDescriptor acceleoListenerDescriptor : descriptors) {
if (acceleoListenerDescriptor.getNature() == null) {
// If we are in stand alone, only use the descriptors without nature
descriptorsUsed.add(acceleoListenerDescriptor);
} else if (EMFPlugin.IS_ECLIPSE_RUNNING && generationRoot != null) {
// Check the nature of the output project
IPath location = new Path(generationRoot.getAbsolutePath());
IFile iFile = ResourcesPlugin.getWorkspace().getRoot().getFileForLocation(location);
if (iFile != null) {
IProject project = iFile.getProject();
try {
if (project != null && project.isAccessible()
&& project.hasNature(acceleoListenerDescriptor.getNature())) {
descriptorsUsed.add(acceleoListenerDescriptor);
}
} catch (CoreException e) {
AcceleoEnginePlugin.log(e, false);
}
}
}
}
boolean forceTraceability = false;
if (!deactivateTraceability) {
for (AcceleoListenerDescriptor acceleoListenerDescriptor : descriptorsUsed) {
IAcceleoTextGenerationListener listener = acceleoListenerDescriptor.getTraceabilityListener();
if (listener instanceof AbstractAcceleoTextGenerationListener) {
AbstractAcceleoTextGenerationListener textGenerationListener = (AbstractAcceleoTextGenerationListener)listener;
textGenerationListener.setGenerationID(generationID);
}
// If one of the listeners wants to force the traceability it will have it.
if (!AcceleoPreferences.isTraceabilityEnabled()
&& acceleoListenerDescriptor.isForceTraceability()) {
AcceleoPreferences.switchTraceability(true);
forceTraceability = true;
}
}
// We create the engine once again if someone has forced the traceability
if (forceTraceability) {
createEngine();
// We restore all the content of the previous engine
for (IAcceleoTextGenerationListener listener : this.addedListeners) {
generationEngine.addListener(listener);
}
for (Map<String, String> properties : this.addedProperties) {
generationEngine.addProperties(properties);
}
for (String propertiesFiles : this.addedPropertiesfiles) {
generationEngine.addProperties(propertiesFiles);
}
}
}
// We add the listeners contributed thanks to the extension point.
for (AcceleoListenerDescriptor acceleoListenerDescriptor : descriptorsUsed) {
generationEngine.addListener(acceleoListenerDescriptor.getTraceabilityListener());
}
try {
return generationEngine.evaluate(template, arguments, generationRoot, strategy, monitor);
} finally {
for (IAcceleoTextGenerationListener listener : STATIC_LISTENERS) {
generationEngine.removeListener(listener);
}
for (AcceleoListenerDescriptor acceleoListenerDescriptor : descriptorsUsed) {
generationEngine.removeListener(acceleoListenerDescriptor.getTraceabilityListener());
}
// Clear the cache
addedListeners.clear();
addedProperties.clear();
addedPropertiesfiles.clear();
if (forceTraceability) {
AcceleoPreferences.switchTraceability(false);
}
}
}
|
diff --git a/src/net/sf/antcontrib/cpptasks/devstudio/DevStudioCompatibleCCompiler.java b/src/net/sf/antcontrib/cpptasks/devstudio/DevStudioCompatibleCCompiler.java
index 25027990..dc1afb31 100644
--- a/src/net/sf/antcontrib/cpptasks/devstudio/DevStudioCompatibleCCompiler.java
+++ b/src/net/sf/antcontrib/cpptasks/devstudio/DevStudioCompatibleCCompiler.java
@@ -1,133 +1,135 @@
/*
*
* Copyright 2002-2004 The Ant-Contrib project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.sf.antcontrib.cpptasks.devstudio;
import java.io.File;
import java.util.Vector;
import net.sf.antcontrib.cpptasks.CUtil;
import net.sf.antcontrib.cpptasks.compiler.CommandLineCompilerConfiguration;
import net.sf.antcontrib.cpptasks.compiler.CompilerConfiguration;
import net.sf.antcontrib.cpptasks.compiler.LinkType;
import net.sf.antcontrib.cpptasks.compiler.PrecompilingCommandLineCCompiler;
import org.apache.tools.ant.BuildException;
import org.apache.tools.ant.types.Environment;
import net.sf.antcontrib.cpptasks.OptimizationEnum;
/**
* An abstract base class for compilers that are basically command line
* compatible with Microsoft(r) C/C++ Optimizing Compiler
*
* @author Curt Arnold
*/
public abstract class DevStudioCompatibleCCompiler
extends
PrecompilingCommandLineCCompiler {
private static String[] mflags = new String[]{
//
// first four are single-threaded
// (runtime=static,debug=false), (..,debug=true),
// (runtime=dynamic,debug=true), (..,debug=false), (not supported)
// next four are multi-threaded, same sequence
"/ML", "/MLd", null, null, "/MT", "/MTd", "/MD", "/MDd"};
protected DevStudioCompatibleCCompiler(String command,
String identifierArg, boolean newEnvironment, Environment env) {
super(command, identifierArg, new String[]{".c", ".cc", ".cpp", ".cxx",
".c++"}, new String[]{".h", ".hpp", ".inl"}, ".obj", false,
null, newEnvironment, env);
}
protected void addImpliedArgs(final Vector args,
final boolean debug,
final boolean multithreaded,
final boolean exceptions,
final LinkType linkType,
final Boolean rtti,
final OptimizationEnum optimization) {
args.addElement("/c");
args.addElement("/nologo");
if (exceptions) {
// changed to eliminate warning on VC 2005, should support VC 6 and later
// use /GX to support VC5 - 2005 (with warning)
args.addElement("/EHsc");
}
int mindex = 0;
if (multithreaded) {
mindex += 4;
}
boolean staticRuntime = linkType.isStaticRuntime();
if (!staticRuntime) {
mindex += 2;
}
if (debug) {
mindex += 1;
+// FREEHEP changed /Zi into /Z7
args.addElement("/Zi");
args.addElement("/Od");
args.addElement("/GZ");
args.addElement("/D_DEBUG");
} else {
if (optimization != null) {
if (optimization.isSize()) {
args.addElement("/O1");
}
if (optimization.isSpeed()) {
args.addElement("/O2");
}
}
args.addElement("/DNDEBUG");
}
String mflag = mflags[mindex];
if (mflag == null) {
throw new BuildException(
"multithread='false' and runtime='dynamic' not supported");
}
args.addElement(mflag);
if (rtti != null && rtti.booleanValue()) {
args.addElement("/GR");
}
}
protected void addWarningSwitch(Vector args, int level) {
DevStudioProcessor.addWarningSwitch(args, level);
}
protected CompilerConfiguration createPrecompileGeneratingConfig(
CommandLineCompilerConfiguration baseConfig, File prototype,
String lastInclude) {
String[] additionalArgs = new String[]{
"/Fp" + CUtil.getBasename(prototype) + ".pch", "/Yc"};
+ // FREEHEP FIXME we may need /Yd here, but only in debug mode, how do we find out?
return new CommandLineCompilerConfiguration(baseConfig, additionalArgs,
null, true);
}
protected CompilerConfiguration createPrecompileUsingConfig(
CommandLineCompilerConfiguration baseConfig, File prototype,
String lastInclude, String[] exceptFiles) {
String[] additionalArgs = new String[]{
"/Fp" + CUtil.getBasename(prototype) + ".pch",
"/Yu" + lastInclude};
return new CommandLineCompilerConfiguration(baseConfig, additionalArgs,
exceptFiles, false);
}
protected void getDefineSwitch(StringBuffer buffer, String define,
String value) {
DevStudioProcessor.getDefineSwitch(buffer, define, value);
}
protected File[] getEnvironmentIncludePath() {
return CUtil.getPathFromEnvironment("INCLUDE", ";");
}
protected String getIncludeDirSwitch(String includeDir) {
return DevStudioProcessor.getIncludeDirSwitch(includeDir);
}
protected void getUndefineSwitch(StringBuffer buffer, String define) {
DevStudioProcessor.getUndefineSwitch(buffer, define);
}
}
| false | true | protected DevStudioCompatibleCCompiler(String command,
String identifierArg, boolean newEnvironment, Environment env) {
super(command, identifierArg, new String[]{".c", ".cc", ".cpp", ".cxx",
".c++"}, new String[]{".h", ".hpp", ".inl"}, ".obj", false,
null, newEnvironment, env);
}
protected void addImpliedArgs(final Vector args,
final boolean debug,
final boolean multithreaded,
final boolean exceptions,
final LinkType linkType,
final Boolean rtti,
final OptimizationEnum optimization) {
args.addElement("/c");
args.addElement("/nologo");
if (exceptions) {
// changed to eliminate warning on VC 2005, should support VC 6 and later
// use /GX to support VC5 - 2005 (with warning)
args.addElement("/EHsc");
}
int mindex = 0;
if (multithreaded) {
mindex += 4;
}
boolean staticRuntime = linkType.isStaticRuntime();
if (!staticRuntime) {
mindex += 2;
}
if (debug) {
mindex += 1;
args.addElement("/Zi");
args.addElement("/Od");
args.addElement("/GZ");
args.addElement("/D_DEBUG");
} else {
if (optimization != null) {
if (optimization.isSize()) {
args.addElement("/O1");
}
if (optimization.isSpeed()) {
args.addElement("/O2");
}
}
args.addElement("/DNDEBUG");
}
String mflag = mflags[mindex];
if (mflag == null) {
throw new BuildException(
"multithread='false' and runtime='dynamic' not supported");
}
args.addElement(mflag);
if (rtti != null && rtti.booleanValue()) {
args.addElement("/GR");
}
}
protected void addWarningSwitch(Vector args, int level) {
DevStudioProcessor.addWarningSwitch(args, level);
}
protected CompilerConfiguration createPrecompileGeneratingConfig(
CommandLineCompilerConfiguration baseConfig, File prototype,
String lastInclude) {
String[] additionalArgs = new String[]{
"/Fp" + CUtil.getBasename(prototype) + ".pch", "/Yc"};
return new CommandLineCompilerConfiguration(baseConfig, additionalArgs,
null, true);
}
protected CompilerConfiguration createPrecompileUsingConfig(
CommandLineCompilerConfiguration baseConfig, File prototype,
String lastInclude, String[] exceptFiles) {
String[] additionalArgs = new String[]{
"/Fp" + CUtil.getBasename(prototype) + ".pch",
"/Yu" + lastInclude};
return new CommandLineCompilerConfiguration(baseConfig, additionalArgs,
exceptFiles, false);
}
protected void getDefineSwitch(StringBuffer buffer, String define,
String value) {
DevStudioProcessor.getDefineSwitch(buffer, define, value);
}
protected File[] getEnvironmentIncludePath() {
return CUtil.getPathFromEnvironment("INCLUDE", ";");
}
protected String getIncludeDirSwitch(String includeDir) {
return DevStudioProcessor.getIncludeDirSwitch(includeDir);
}
protected void getUndefineSwitch(StringBuffer buffer, String define) {
DevStudioProcessor.getUndefineSwitch(buffer, define);
}
}
| protected DevStudioCompatibleCCompiler(String command,
String identifierArg, boolean newEnvironment, Environment env) {
super(command, identifierArg, new String[]{".c", ".cc", ".cpp", ".cxx",
".c++"}, new String[]{".h", ".hpp", ".inl"}, ".obj", false,
null, newEnvironment, env);
}
protected void addImpliedArgs(final Vector args,
final boolean debug,
final boolean multithreaded,
final boolean exceptions,
final LinkType linkType,
final Boolean rtti,
final OptimizationEnum optimization) {
args.addElement("/c");
args.addElement("/nologo");
if (exceptions) {
// changed to eliminate warning on VC 2005, should support VC 6 and later
// use /GX to support VC5 - 2005 (with warning)
args.addElement("/EHsc");
}
int mindex = 0;
if (multithreaded) {
mindex += 4;
}
boolean staticRuntime = linkType.isStaticRuntime();
if (!staticRuntime) {
mindex += 2;
}
if (debug) {
mindex += 1;
// FREEHEP changed /Zi into /Z7
args.addElement("/Zi");
args.addElement("/Od");
args.addElement("/GZ");
args.addElement("/D_DEBUG");
} else {
if (optimization != null) {
if (optimization.isSize()) {
args.addElement("/O1");
}
if (optimization.isSpeed()) {
args.addElement("/O2");
}
}
args.addElement("/DNDEBUG");
}
String mflag = mflags[mindex];
if (mflag == null) {
throw new BuildException(
"multithread='false' and runtime='dynamic' not supported");
}
args.addElement(mflag);
if (rtti != null && rtti.booleanValue()) {
args.addElement("/GR");
}
}
protected void addWarningSwitch(Vector args, int level) {
DevStudioProcessor.addWarningSwitch(args, level);
}
protected CompilerConfiguration createPrecompileGeneratingConfig(
CommandLineCompilerConfiguration baseConfig, File prototype,
String lastInclude) {
String[] additionalArgs = new String[]{
"/Fp" + CUtil.getBasename(prototype) + ".pch", "/Yc"};
// FREEHEP FIXME we may need /Yd here, but only in debug mode, how do we find out?
return new CommandLineCompilerConfiguration(baseConfig, additionalArgs,
null, true);
}
protected CompilerConfiguration createPrecompileUsingConfig(
CommandLineCompilerConfiguration baseConfig, File prototype,
String lastInclude, String[] exceptFiles) {
String[] additionalArgs = new String[]{
"/Fp" + CUtil.getBasename(prototype) + ".pch",
"/Yu" + lastInclude};
return new CommandLineCompilerConfiguration(baseConfig, additionalArgs,
exceptFiles, false);
}
protected void getDefineSwitch(StringBuffer buffer, String define,
String value) {
DevStudioProcessor.getDefineSwitch(buffer, define, value);
}
protected File[] getEnvironmentIncludePath() {
return CUtil.getPathFromEnvironment("INCLUDE", ";");
}
protected String getIncludeDirSwitch(String includeDir) {
return DevStudioProcessor.getIncludeDirSwitch(includeDir);
}
protected void getUndefineSwitch(StringBuffer buffer, String define) {
DevStudioProcessor.getUndefineSwitch(buffer, define);
}
}
|
diff --git a/modules/cpr/src/main/java/org/atmosphere/cache/AbstractBroadcasterCache.java b/modules/cpr/src/main/java/org/atmosphere/cache/AbstractBroadcasterCache.java
index ccd8f3166..247df5464 100644
--- a/modules/cpr/src/main/java/org/atmosphere/cache/AbstractBroadcasterCache.java
+++ b/modules/cpr/src/main/java/org/atmosphere/cache/AbstractBroadcasterCache.java
@@ -1,194 +1,194 @@
/*
* Copyright 2012 Jeanfrancois Arcand
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package org.atmosphere.cache;
import org.atmosphere.cpr.BroadcasterCache;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Set;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.ScheduledFuture;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.locks.ReadWriteLock;
import java.util.concurrent.locks.ReentrantReadWriteLock;
/**
* Abstract {@link org.atmosphere.cpr.BroadcasterCache} which is used to implement headers or query parameters or
* session based caching.
*
* @author Paul Khodchenkov
* @author Jeanfrancois Arcand
*/
public abstract class AbstractBroadcasterCache implements BroadcasterCache {
private final Logger logger = LoggerFactory.getLogger(AbstractBroadcasterCache.class);
protected final List<CacheMessage> messages = new LinkedList<CacheMessage>();
protected final Set<String> messagesIds = new HashSet<String>();
protected final ReadWriteLock readWriteLock = new ReentrantReadWriteLock();
protected ScheduledFuture scheduledFuture;
protected long maxCacheTime = TimeUnit.MINUTES.toMillis(2);//2 minutes
protected long invalidateCacheInterval = TimeUnit.MINUTES.toMillis(1);//1 minute
protected ScheduledExecutorService reaper = Executors.newSingleThreadScheduledExecutor();
protected boolean isShared = false;
protected final List<BroadcasterCacheInspector> inspectors = new LinkedList<BroadcasterCacheInspector>();
@Override
public void start() {
- reaper.scheduleAtFixedRate(new Runnable() {
+ scheduledFuture = reaper.scheduleAtFixedRate(new Runnable() {
public void run() {
readWriteLock.writeLock().lock();
try {
long now = System.nanoTime();
List<CacheMessage> expiredMessages = new ArrayList<CacheMessage>();
for (CacheMessage message : messages) {
if (TimeUnit.NANOSECONDS.toMillis(now - message.getCreateTime()) > maxCacheTime) {
expiredMessages.add(message);
}
}
for (CacheMessage expiredMessage : expiredMessages) {
messages.remove(expiredMessage);
messagesIds.remove(expiredMessage.getId());
}
} finally {
readWriteLock.writeLock().unlock();
}
}
}, 0, invalidateCacheInterval, TimeUnit.MILLISECONDS);
}
@Override
public void stop() {
if (scheduledFuture != null) {
scheduledFuture.cancel(false);
scheduledFuture = null;
}
if (!isShared) {
reaper.shutdown();
}
}
protected void put(Message message, Long now) {
if (!inspect(message)) return;
logger.trace("Caching message {} for Broadcaster {}", message.message);
readWriteLock.writeLock().lock();
try {
boolean hasMessageWithSameId = messagesIds.contains(message.id);
if (!hasMessageWithSameId) {
CacheMessage cacheMessage = new CacheMessage(message.id, now, message.message);
messages.add(cacheMessage);
messagesIds.add(message.id);
}
} finally {
readWriteLock.writeLock().unlock();
}
}
protected List<Object> get(long cacheHeaderTime) {
List<Object> result = new ArrayList<Object>();
readWriteLock.readLock().lock();
try {
for (CacheMessage cacheMessage : messages) {
if (cacheMessage.getCreateTime() > cacheHeaderTime) {
result.add(cacheMessage.getMessage());
}
}
} finally {
readWriteLock.readLock().unlock();
}
logger.trace("Retrieved messages {}", result);
return result;
}
/**
* Set to true the associated {@link #getReaper()} is shared amongs {@link BroadcasterCache}
*
* @param isShared to true if shared. False by default.
* @return this
*/
public AbstractBroadcasterCache setShared(boolean isShared) {
this.isShared = isShared;
return this;
}
/**
* Set the {@link ScheduledExecutorService} to clear the cached message.
*
* @param reaper the {@link ScheduledExecutorService} to clear the cached message.
* @return this
*/
public AbstractBroadcasterCache setReaper(ScheduledExecutorService reaper) {
this.reaper = reaper;
return this;
}
/**
* Return the {@link ScheduledExecutorService}
*
* @return the {@link ScheduledExecutorService}
*/
public ScheduledExecutorService getReaper() {
return reaper;
}
/**
* Set the time, in millisecond, the cache will be checked and purged.
*
* @param invalidateCacheInterval
* @return this
*/
public AbstractBroadcasterCache setInvalidateCacheInterval(long invalidateCacheInterval) {
this.invalidateCacheInterval = invalidateCacheInterval;
return this;
}
/**
* Set the maxium time, in millisecond, a message stay alive in the cache.
*
* @param maxCacheTime the maxium time, in millisecond, a message stay alive in the cache.
* @return this
*/
public AbstractBroadcasterCache setMaxCacheTime(long maxCacheTime) {
this.maxCacheTime = maxCacheTime;
return this;
}
@Override
public BroadcasterCache inspector(BroadcasterCacheInspector b) {
inspectors.add(b);
return this;
}
protected boolean inspect(Message m) {
for (BroadcasterCacheInspector b : inspectors) {
if (!b.inspect(m)) return false;
}
return true;
}
}
| true | true | public void start() {
reaper.scheduleAtFixedRate(new Runnable() {
public void run() {
readWriteLock.writeLock().lock();
try {
long now = System.nanoTime();
List<CacheMessage> expiredMessages = new ArrayList<CacheMessage>();
for (CacheMessage message : messages) {
if (TimeUnit.NANOSECONDS.toMillis(now - message.getCreateTime()) > maxCacheTime) {
expiredMessages.add(message);
}
}
for (CacheMessage expiredMessage : expiredMessages) {
messages.remove(expiredMessage);
messagesIds.remove(expiredMessage.getId());
}
} finally {
readWriteLock.writeLock().unlock();
}
}
}, 0, invalidateCacheInterval, TimeUnit.MILLISECONDS);
}
| public void start() {
scheduledFuture = reaper.scheduleAtFixedRate(new Runnable() {
public void run() {
readWriteLock.writeLock().lock();
try {
long now = System.nanoTime();
List<CacheMessage> expiredMessages = new ArrayList<CacheMessage>();
for (CacheMessage message : messages) {
if (TimeUnit.NANOSECONDS.toMillis(now - message.getCreateTime()) > maxCacheTime) {
expiredMessages.add(message);
}
}
for (CacheMessage expiredMessage : expiredMessages) {
messages.remove(expiredMessage);
messagesIds.remove(expiredMessage.getId());
}
} finally {
readWriteLock.writeLock().unlock();
}
}
}, 0, invalidateCacheInterval, TimeUnit.MILLISECONDS);
}
|
diff --git a/runtime/src/share/classes/javafx/application/WindowImpl.java b/runtime/src/share/classes/javafx/application/WindowImpl.java
index c4e662224..8d7f6ada7 100644
--- a/runtime/src/share/classes/javafx/application/WindowImpl.java
+++ b/runtime/src/share/classes/javafx/application/WindowImpl.java
@@ -1,153 +1,152 @@
/*
* Copyright 2008 Sun Microsystems, Inc. All Rights Reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Sun Microsystems, Inc., 4150 Network Circle, Santa Clara,
* CA 95054 USA or visit www.sun.com if you need additional information or
* have any questions.
*/
package javafx.application;
import javax.swing.JDialog;
import javax.swing.JFrame;
import javax.swing.JRootPane;
import javax.swing.RootPaneContainer;
import java.awt.Graphics;
import java.lang.reflect.Method;
/**
* TransparentWindowImpl - This is an internal implementation class for
* Transparent and shaped window support in Window.fx and Frame.fx. It should
* not be public or be ever used other than in those classes.
*
* @author Created by Jasper Potts (Jun 18, 2008)
*/
class WindowImpl {
private static final boolean isMac = "Mac OS X".equals(System.getProperty("os.name"));
static JDialog createJDialog(java.awt.Window owner) {
JDialog dialog;
if (isMac) {
dialog = new JDialog((java.awt.Window) owner) {
@Override protected JRootPane createRootPane() {
JRootPane rp = new JRootPane() {
@Override public void paint(Graphics g) {
g.clearRect(0, 0, getWidth(), getHeight());
super.paint(g);
}
};
rp.setOpaque(true);
return rp;
}
};
} else {
dialog = new JDialog((java.awt.Window) owner);
}
// set the default background color of white
dialog.setBackground(java.awt.Color.WHITE);
return dialog;
}
static JFrame createJFrame() {
JFrame frame;
if (isMac) {
frame = new JFrame() {
@Override protected JRootPane createRootPane() {
JRootPane rp = new JRootPane() {
@Override public void paint(Graphics g) {
g.clearRect(0, 0, getWidth(), getHeight());
super.paint(g);
}
};
rp.setOpaque(true);
return rp;
}
};
} else {
frame = new JFrame();
}
// set the default background color of white
frame.setBackground(java.awt.Color.WHITE);
return frame;
}
static void setWindowTransparency(java.awt.Window window, boolean transparent) {
// check if were are already in the required state
if (isMac && window instanceof RootPaneContainer) {
- System.out.println("setWindowTransparency isMac="+isMac+ " transparent="+transparent);
RootPaneContainer rootPaneContainer = (RootPaneContainer) window;
window.setBackground(new java.awt.Color(0, 0, 0, 0));
// remove window shadows as they need to be updated
// every time the contents outline changes and there
// is no way we can detect this. It is to expensive
// to just update on every repaint.
rootPaneContainer.getRootPane()
.putClientProperty("apple.awt.windowShadow.revalidateNow", new Object());
rootPaneContainer.getRootPane().putClientProperty("Window.hasShadow", Boolean.FALSE);
// disable window dragging
rootPaneContainer.getRootPane().putClientProperty("apple.awt.draggableWindowBackground", false);
} else {
// try 6u10 AWTUtilities transparency
Class c = null;
try {
c = Class.forName("com.sun.awt.AWTUtilities");
} catch (ClassNotFoundException e) {}// ignore as handled
if (c != null) {
try {
Method m = c.getMethod("setWindowOpaque", java.awt.Window.class, Boolean.TYPE);
try {
m.invoke(null, window, !transparent);
} catch (UnsupportedOperationException e) {
System.err.println("Warning: Transparent windows are not " +
"supported by the current platform.");
}
} catch (Exception e) {
System.err.println("Error setting window transparency using AWTUtilities");
e.printStackTrace();
}
}
}
}
static void setWindowOpacity(java.awt.Window window, double opacity) {
if (isMac && window instanceof RootPaneContainer) {
RootPaneContainer rootPaneContainer = (RootPaneContainer) window;
rootPaneContainer.getRootPane().putClientProperty("Window.alpha", new Float(opacity));
} else {
// try 6u10 AWTUtilities transparency
Class c = null;
try {
c = Class.forName("com.sun.awt.AWTUtilities");
} catch (ClassNotFoundException e) {}// ignore as handled
if (c != null) {
try {
Method m = c.getMethod("setWindowOpacity", java.awt.Window.class, Float.TYPE);
try {
m.invoke(null, window, (float)opacity);
} catch (UnsupportedOperationException e) {
System.err.println("Warning: Setting the opacity of a window is not " +
"supported by the current platform.");
}
} catch (Exception e) {
System.err.println("Error setting window opacity using AWTUtilities");
e.printStackTrace();
}
}
}
}
}
| true | true | static void setWindowTransparency(java.awt.Window window, boolean transparent) {
// check if were are already in the required state
if (isMac && window instanceof RootPaneContainer) {
System.out.println("setWindowTransparency isMac="+isMac+ " transparent="+transparent);
RootPaneContainer rootPaneContainer = (RootPaneContainer) window;
window.setBackground(new java.awt.Color(0, 0, 0, 0));
// remove window shadows as they need to be updated
// every time the contents outline changes and there
// is no way we can detect this. It is to expensive
// to just update on every repaint.
rootPaneContainer.getRootPane()
.putClientProperty("apple.awt.windowShadow.revalidateNow", new Object());
rootPaneContainer.getRootPane().putClientProperty("Window.hasShadow", Boolean.FALSE);
// disable window dragging
rootPaneContainer.getRootPane().putClientProperty("apple.awt.draggableWindowBackground", false);
} else {
// try 6u10 AWTUtilities transparency
Class c = null;
try {
c = Class.forName("com.sun.awt.AWTUtilities");
} catch (ClassNotFoundException e) {}// ignore as handled
if (c != null) {
try {
Method m = c.getMethod("setWindowOpaque", java.awt.Window.class, Boolean.TYPE);
try {
m.invoke(null, window, !transparent);
} catch (UnsupportedOperationException e) {
System.err.println("Warning: Transparent windows are not " +
"supported by the current platform.");
}
} catch (Exception e) {
System.err.println("Error setting window transparency using AWTUtilities");
e.printStackTrace();
}
}
}
}
| static void setWindowTransparency(java.awt.Window window, boolean transparent) {
// check if were are already in the required state
if (isMac && window instanceof RootPaneContainer) {
RootPaneContainer rootPaneContainer = (RootPaneContainer) window;
window.setBackground(new java.awt.Color(0, 0, 0, 0));
// remove window shadows as they need to be updated
// every time the contents outline changes and there
// is no way we can detect this. It is to expensive
// to just update on every repaint.
rootPaneContainer.getRootPane()
.putClientProperty("apple.awt.windowShadow.revalidateNow", new Object());
rootPaneContainer.getRootPane().putClientProperty("Window.hasShadow", Boolean.FALSE);
// disable window dragging
rootPaneContainer.getRootPane().putClientProperty("apple.awt.draggableWindowBackground", false);
} else {
// try 6u10 AWTUtilities transparency
Class c = null;
try {
c = Class.forName("com.sun.awt.AWTUtilities");
} catch (ClassNotFoundException e) {}// ignore as handled
if (c != null) {
try {
Method m = c.getMethod("setWindowOpaque", java.awt.Window.class, Boolean.TYPE);
try {
m.invoke(null, window, !transparent);
} catch (UnsupportedOperationException e) {
System.err.println("Warning: Transparent windows are not " +
"supported by the current platform.");
}
} catch (Exception e) {
System.err.println("Error setting window transparency using AWTUtilities");
e.printStackTrace();
}
}
}
}
|
diff --git a/SakaiRSFComponents/evolvers/src/java/uk/ac/cam/caret/sakai/rsf/evolverimpl/SakaiFCKTextEvolver.java b/SakaiRSFComponents/evolvers/src/java/uk/ac/cam/caret/sakai/rsf/evolverimpl/SakaiFCKTextEvolver.java
index 7fc723c..dfb9e9a 100644
--- a/SakaiRSFComponents/evolvers/src/java/uk/ac/cam/caret/sakai/rsf/evolverimpl/SakaiFCKTextEvolver.java
+++ b/SakaiRSFComponents/evolvers/src/java/uk/ac/cam/caret/sakai/rsf/evolverimpl/SakaiFCKTextEvolver.java
@@ -1,43 +1,43 @@
/*
* Created on 22 Sep 2006
*/
package uk.ac.cam.caret.sakai.rsf.evolverimpl;
import org.sakaiproject.content.api.ContentHostingService;
import uk.org.ponder.htmlutil.HTMLUtil;
import uk.org.ponder.rsf.components.UIInput;
import uk.org.ponder.rsf.components.UIJointContainer;
import uk.org.ponder.rsf.components.UIVerbatim;
import uk.org.ponder.rsf.evolvers.TextInputEvolver;
public class SakaiFCKTextEvolver implements TextInputEvolver {
public static final String COMPONENT_ID = "sakai-FCKEditor:";
private String context;
private ContentHostingService contentHostingService;
public void setContext(String context) {
this.context = context;
}
public void setContentHostingService(ContentHostingService contentHostingService) {
this.contentHostingService = contentHostingService;
}
public UIJointContainer evolveTextInput(UIInput toevolve) {
toevolve.parent.remove(toevolve);
UIJointContainer joint = new UIJointContainer(toevolve.parent,
toevolve.ID, COMPONENT_ID);
toevolve.ID = SEED_ID; // must change ID while unattached
joint.addComponent(toevolve);
- String collectionID = context.equals("")?
- contentHostingService.getSiteCollection(context) : "";
+ String collectionID = context.equals("")? "" :
+ contentHostingService.getSiteCollection(context);
String js = HTMLUtil.emitJavascriptCall("SakaiProject.fckeditor.initializeEditor",
new String[] {toevolve.getFullID(), collectionID});
UIVerbatim.make(joint, "textarea-js", js);
return joint;
}
}
| true | true | public UIJointContainer evolveTextInput(UIInput toevolve) {
toevolve.parent.remove(toevolve);
UIJointContainer joint = new UIJointContainer(toevolve.parent,
toevolve.ID, COMPONENT_ID);
toevolve.ID = SEED_ID; // must change ID while unattached
joint.addComponent(toevolve);
String collectionID = context.equals("")?
contentHostingService.getSiteCollection(context) : "";
String js = HTMLUtil.emitJavascriptCall("SakaiProject.fckeditor.initializeEditor",
new String[] {toevolve.getFullID(), collectionID});
UIVerbatim.make(joint, "textarea-js", js);
return joint;
}
| public UIJointContainer evolveTextInput(UIInput toevolve) {
toevolve.parent.remove(toevolve);
UIJointContainer joint = new UIJointContainer(toevolve.parent,
toevolve.ID, COMPONENT_ID);
toevolve.ID = SEED_ID; // must change ID while unattached
joint.addComponent(toevolve);
String collectionID = context.equals("")? "" :
contentHostingService.getSiteCollection(context);
String js = HTMLUtil.emitJavascriptCall("SakaiProject.fckeditor.initializeEditor",
new String[] {toevolve.getFullID(), collectionID});
UIVerbatim.make(joint, "textarea-js", js);
return joint;
}
|
diff --git a/atlas-web/src/main/java/ae3/service/structuredquery/AtlasStructuredQueryService.java b/atlas-web/src/main/java/ae3/service/structuredquery/AtlasStructuredQueryService.java
index e33f7d1fe..9da552d30 100644
--- a/atlas-web/src/main/java/ae3/service/structuredquery/AtlasStructuredQueryService.java
+++ b/atlas-web/src/main/java/ae3/service/structuredquery/AtlasStructuredQueryService.java
@@ -1,1922 +1,1928 @@
/*
* Copyright 2008-2010 Microarray Informatics Team, EMBL-European Bioinformatics Institute
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*
* For further details of the Gene Expression Atlas project, including source code,
* downloads and documentation, please see:
*
* http://gxa.github.com/gxa
*/
package ae3.service.structuredquery;
import ae3.model.AtlasGene;
import ae3.model.ListResultRow;
import ae3.model.ListResultRowExperiment;
import ae3.service.AtlasStatisticsQueryService;
import com.google.common.base.Predicate;
import com.google.common.collect.Collections2;
import com.google.common.collect.HashMultiset;
import com.google.common.collect.Multiset;
import org.apache.commons.lang.StringUtils;
import org.apache.solr.client.solrj.SolrQuery;
import org.apache.solr.client.solrj.SolrServer;
import org.apache.solr.client.solrj.SolrServerException;
import org.apache.solr.client.solrj.response.FacetField;
import org.apache.solr.client.solrj.response.QueryResponse;
import org.apache.solr.common.SolrDocument;
import org.apache.solr.common.SolrDocumentList;
import org.apache.solr.common.params.FacetParams;
import org.apache.solr.core.CoreContainer;
import org.apache.solr.core.SolrCore;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.DisposableBean;
import uk.ac.ebi.gxa.dao.ExperimentDAO;
import uk.ac.ebi.gxa.efo.Efo;
import uk.ac.ebi.gxa.efo.EfoTerm;
import uk.ac.ebi.gxa.exceptions.LogUtil;
import uk.ac.ebi.gxa.index.builder.IndexBuilder;
import uk.ac.ebi.gxa.index.builder.IndexBuilderEventHandler;
import uk.ac.ebi.gxa.netcdf.reader.AtlasNetCDFDAO;
import uk.ac.ebi.gxa.properties.AtlasProperties;
import uk.ac.ebi.gxa.statistics.*;
import uk.ac.ebi.gxa.utils.EfvTree;
import uk.ac.ebi.gxa.utils.EscapeUtil;
import uk.ac.ebi.gxa.utils.Maker;
import uk.ac.ebi.gxa.utils.Pair;
import uk.ac.ebi.microarray.atlas.model.Experiment;
import uk.ac.ebi.microarray.atlas.model.ExpressionAnalysis;
import uk.ac.ebi.microarray.atlas.model.UpDownCondition;
import uk.ac.ebi.microarray.atlas.model.UpDownExpression;
import java.util.*;
import static com.google.common.base.Joiner.on;
import static uk.ac.ebi.gxa.exceptions.LogUtil.createUnexpected;
/**
* Structured query support class. The main query engine of the Atlas.
*
* @author pashky
*/
public class AtlasStructuredQueryService implements IndexBuilderEventHandler, DisposableBean {
// This variable acts as a place holder for a heatmap column index that has not been set yet
private static final int POS_NOT_SET = -1;
// Maximum number of efv columns to be shown by default in web (non-full heatmap) queries.
// In web queries, if some efvs had been trimmed in a given ef, the user can expand that ef to see all
// trimmed efvs.
// Note that MAX_EFV_COLUMNS columns restriction does not apply to API (full heatmap) queries.
private static final int MAX_EFV_COLUMNS = 120;
private static final boolean INCLUDE_EFO_PARENTS_IN_HEATMAP = true;
final private Logger log = LoggerFactory.getLogger(getClass());
private SolrServer solrServerAtlas;
private SolrServer solrServerExpt;
private SolrServer solrServerProp;
private AtlasProperties atlasProperties;
private IndexBuilder indexBuilder;
private AtlasEfvService efvService;
private AtlasEfoService efoService;
private AtlasGenePropertyService genePropService;
private AtlasStatisticsQueryService atlasStatisticsQueryService;
private ExperimentDAO experimentDAO;
private AtlasNetCDFDAO atlasNetCDFDAO;
private CoreContainer coreContainer;
private Efo efo;
private final Set<String> cacheFill = new HashSet<String>();
private SortedSet<String> allSpecies = new TreeSet<String>(String.CASE_INSENSITIVE_ORDER);
/**
* Hack: prevents OOMs by clearing Lucene field cache by closing the searcher which closes the IndexReader
* (it's the only way now if we don't hack Lucene)
*/
private void controlCache() {
if (coreContainer == null)
return;
synchronized (cacheFill) {
if (cacheFill.size() > 500) {
SolrCore core = coreContainer.getCore(Constants.CORE_ATLAS);
if (core != null) {
core.closeSearcher();
core.close();
}
cacheFill.clear();
}
}
}
/**
* Adds field to cache watcher (it's supposed to estimate number of fields which actually end up in Lucene cache,
* which we can't check directly)
*
* @param field the name of field to add
*/
private void notifyCache(String field) {
synchronized (cacheFill) {
cacheFill.add(field);
}
}
public void setSolrServerAtlas(SolrServer solrServerAtlas) {
this.solrServerAtlas = solrServerAtlas;
}
public void setSolrServerExpt(SolrServer solrServerExpt) {
this.solrServerExpt = solrServerExpt;
}
public void setSolrServerProp(SolrServer solrServerProp) {
this.solrServerProp = solrServerProp;
}
public void setCoreContainer(CoreContainer coreContainer) {
this.coreContainer = coreContainer;
}
public void setExperimentDAO(ExperimentDAO experimentDAO) {
this.experimentDAO = experimentDAO;
}
public void setEfvService(AtlasEfvService efvService) {
this.efvService = efvService;
}
public void setEfoService(AtlasEfoService efoService) {
this.efoService = efoService;
}
public void setAtlasNetCDFDAO(AtlasNetCDFDAO atlasNetCDFDAO) {
this.atlasNetCDFDAO = atlasNetCDFDAO;
}
public void setIndexBuilder(IndexBuilder indexBuilder) {
this.indexBuilder = indexBuilder;
indexBuilder.registerIndexBuildEventHandler(this);
}
public Efo getEfo() {
return efo;
}
public void setEfo(Efo efo) {
this.efo = efo;
}
public void setGenePropService(AtlasGenePropertyService genePropService) {
this.genePropService = genePropService;
}
public void setAtlasProperties(AtlasProperties atlasProperties) {
this.atlasProperties = atlasProperties;
}
public void setAtlasStatisticsQueryService(AtlasStatisticsQueryService atlasStatisticsQueryService) {
this.atlasStatisticsQueryService = atlasStatisticsQueryService;
}
public Set<String> getAllFactors() {
return efvService.getAllFactors();
}
/**
* SOLR query builder class. Collects necessary part of SOLR query string as we go through conditions.
* Can't use just StringBuilder as we need to maintain two separate chains - query itself and scoring function.
* <p/>
* Can be used as chain of calls as all appendXX() methods return self
*/
private static class SolrQueryBuilder {
/**
* Query string
*/
private StringBuilder solrq = new StringBuilder();
/**
* Appends AND to query only if it is needed
*
* @return self
*/
public SolrQueryBuilder appendAnd() {
if (solrq.length() > 0)
solrq.append(" AND ");
return this;
}
/**
* Appends string to query
*
* @param s string
* @return self
*/
public SolrQueryBuilder append(String s) {
solrq.append(s);
return this;
}
/**
* Appends object to query
*
* @param s object
* @return self
*/
public SolrQueryBuilder append(Object s) {
solrq.append(s);
return this;
}
/**
* Appends other SB to query
*
* @param s SB
* @return self
*/
public SolrQueryBuilder append(StringBuilder s) {
solrq.append(s);
return this;
}
/**
* Returns assembled query string
*
* @return string
*/
@Override
public String toString() {
return solrq.toString();
}
/**
* Checks if query is empty
*
* @return true or false
*/
public boolean isEmpty() {
return solrq.length() == 0;
}
}
/**
* Column information class to be used as paylod in result EFV tree. Base version storing just position
* of EFV data in result counters array
*/
private static class BaseColumnInfo implements ColumnInfo {
private int position;
private BaseColumnInfo(int position) {
this.position = position;
}
public int getPosition() {
return position;
}
public int compareTo(ColumnInfo o) {
return getPosition() - o.getPosition();
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
BaseColumnInfo that = (BaseColumnInfo) o;
if (position != that.position) return false;
return true;
}
@Override
public int hashCode() {
return position;
}
public boolean isQualified(UpdownCounter ud) {
return !ud.isZero();
}
/**
* @return true, if the heatmap column position has been set to a valid (>= 0) value
*/
public boolean isPositionSet() {
return position != POS_NOT_SET;
}
}
/**
* Extended version of columninfo, checking required minimum number of experiments
*/
private static class QueryColumnInfo extends BaseColumnInfo {
private int minUpExperiments = Integer.MAX_VALUE;
private int minDnExperiments = Integer.MAX_VALUE;
private int minOrExperiments = Integer.MAX_VALUE;
private int minNoExperiments = Integer.MAX_VALUE;
private boolean displayNonDECounts = false;
/**
* This constructor is used when QueryState is populated with efos/efvs - using POS_NOT_SET emphasizes the fact
* that no final heatmap column positionality is set in QueryState. All such information is worked out in processResultGenes()
* method call. The order of efo's displayed in the heatmap reflect their order of entry into QueryState. The order of efv's
* (within each ef grouping) however is dictated by the cumulative column experiment counts for each efv and cannot
* be decided until experiment counts for al heatmap cells have been calculated.
*/
private QueryColumnInfo() {
super(POS_NOT_SET);
}
/**
* Update column minimum requirements with provided query information
* (to be called on each query condition)
*
* @param expression query expression
* @param minExperiments minimum number of experiments for this expression
*/
public void update(QueryExpression expression, int minExperiments) {
switch (expression) {
case UP:
case UP_ONLY:
minUpExperiments = Math.min(minExperiments, this.minUpExperiments);
break;
case DOWN:
case DOWN_ONLY:
minDnExperiments = Math.min(minExperiments, this.minDnExperiments);
break;
case UP_DOWN:
minOrExperiments = Math.min(minExperiments, this.minOrExperiments);
break;
case NON_D_E:
minNoExperiments = Math.min(minExperiments, this.minNoExperiments);
displayNonDECounts = true;
break;
case ANY:
minOrExperiments = Math.min(minExperiments, this.minOrExperiments);
minNoExperiments = Math.min(minExperiments, this.minNoExperiments);
displayNonDECounts = true;
break;
}
}
/**
* @return true if non-de counts should be displayed in this column; false otherwise
*/
public boolean displayNonDECounts() {
return displayNonDECounts;
}
/**
* Here it checks counter against minimal numbers
*
* @param ud counter
* @return true or false
*/
public boolean isQualified(UpdownCounter ud) {
return ud.getUps() >= minUpExperiments ||
ud.getDowns() >= minDnExperiments ||
ud.getNones() >= minNoExperiments ||
ud.getUps() >= minOrExperiments ||
ud.getDowns() >= minOrExperiments;
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
if (!super.equals(o)) return false;
QueryColumnInfo that = (QueryColumnInfo) o;
if (minDnExperiments != that.minDnExperiments) return false;
if (minNoExperiments != that.minNoExperiments) return false;
if (minOrExperiments != that.minOrExperiments) return false;
if (minUpExperiments != that.minUpExperiments) return false;
return true;
}
@Override
public int hashCode() {
int result = super.hashCode();
result = 31 * result + minUpExperiments;
result = 31 * result + minDnExperiments;
result = 31 * result + minOrExperiments;
result = 31 * result + minNoExperiments;
return result;
}
}
/**
* Internal class to pass query state around methods (the main class itself is stateless hence thread-safe)
*/
private class QueryState {
private final SolrQueryBuilder solrq = new SolrQueryBuilder();
private final EfvTree<ColumnInfo> efvs = new EfvTree<ColumnInfo>();
private final EfoTree<ColumnInfo> efos = new EfoTree<ColumnInfo>(getEfo());
private final Set<Long> experiments = new HashSet<Long>();
private final Set<String> scoringEfos = new HashSet<String>();
/**
* Column numberer factory used to add new EFV columns into heatmap
*/
private Maker<ColumnInfo> numberer = new Maker<ColumnInfo>() {
public ColumnInfo make() {
return new QueryColumnInfo();
}
};
/**
* Returns SOLR query builder
*
* @return solr query builder
*/
public SolrQueryBuilder getSolrq() {
return solrq;
}
/**
* Adds experiment IDs to query
*
* @param ids identifiers of experiments to be added to the query
*/
public void addExperiments(Collection<Long> ids) {
experiments.addAll(ids);
}
/**
* Add a Collection of efos with non-zero bit index experiment counts for the genes to be displayed on the heatmap
*
* @param scoringEfos
*/
public void addScoringEfos(Collection<String> scoringEfos) {
this.scoringEfos.addAll(scoringEfos);
}
/**
* Adds EFV to query EFV tree
*
* @param ef factor
* @param efv value
* @param minExperiments required minimum number of experiments
* @param expression query expression
*/
public void addEfv(String ef, String efv, int minExperiments, QueryExpression expression) {
((QueryColumnInfo) efvs.getOrCreate(ef, efv, numberer)).update(expression, minExperiments);
}
/**
* Adds EFO accession to query EFO tree, (including its efo children for ViewType.LIST)
*
* @param id EFO accession
* @param minExperiments required minimum number of experiments
* @param expression query expression
* @param maxEfoDescendantGeneration Specifies the generation down to which this efo's descendants should be included;
* Integer.MAX_VALUE indicates that all descendants
* should be included recursively
*/
public void addEfo(String id, int minExperiments, QueryExpression expression, int maxEfoDescendantGeneration) {
for (ColumnInfo ci : efos.add(id, numberer, maxEfoDescendantGeneration, !INCLUDE_EFO_PARENTS_IN_HEATMAP))
((QueryColumnInfo) ci).update(expression, minExperiments);
}
/**
* Returns set of experiments mentioned in the query
*
* @return set of experiment IDs
*/
public Set<Long> getExperiments() {
return experiments;
}
/**
* @return Set of efos with non-zero bit index experiment counts for the genes to be displayed on the heatmap
*/
public Set<String> getScoringEfos() {
return scoringEfos;
}
/**
* Returns query EFV tree
*
* @return query EFV tree
*/
public EfvTree<ColumnInfo> getEfvs() {
return efvs;
}
/**
* Returns query EFO tree
*
* @return query EFO tree
*/
public EfoTree<ColumnInfo> getEfos() {
return efos;
}
/**
* Checks if query is empty
*
* @return true or false
*/
public boolean isEmpty() {
return solrq.isEmpty();
}
/**
* Checks if query has any condition EFV/EFOs
*
* @return true if query has EFV or EFO conditions, false otherwise
*/
public boolean hasQueryEfoEfvs() {
return efvs.getNumEfvs() > 0 || efos.getNumEfos() > 0;
}
/**
* Informative string representing the query
*
* @return string representation of the object
*/
@Override
public String toString() {
return "SOLR query: <" + solrq.toString() + ">, Experiments: [" + StringUtils.join(experiments, ", ") + "]";
}
}
/**
* Creates SOLR query from atlas query
*
* @param solrq
* @return solr query object
*/
private SolrQuery getFastGeneSolrQuery(SolrQueryBuilder solrq) {
SolrQuery q = new SolrQuery(solrq.toString());
q.addFacetField("id");
q.setRows(0);
q.setFacet(true);
q.setFacetLimit(-1);
q.setFacetMinCount(1);
log.debug("Simple gene query: " + solrq.toString());
log.debug("Expanded simple gene query: " + q.toString());
return q;
}
/**
 * Resolves the set of gene ids matching the given gene conditions and species, by
 * faceting on "id" in the gene Solr index (see {@link #getFastGeneSolrQuery}).
 *
 * @param geneConditions gene part of the user's query
 * @param species        species restrictions (may be empty)
 * @return matching gene ids; empty if neither conditions nor species produced a query
 */
private Set<Integer> getGenesByGeneConditionsAndSpecies(Collection<GeneQueryCondition> geneConditions, Collection<String> species) {
    Set<Integer> geneIds = new HashSet<Integer>();
    SolrQueryBuilder solrq = new SolrQueryBuilder();
    appendGeneQuery(geneConditions, solrq);
    appendSpeciesQuery(species, solrq);
    if (solrq.isEmpty()) {
        return geneIds;
    }
    SolrQuery q = getFastGeneSolrQuery(solrq);
    try {
        long start = System.currentTimeMillis();
        QueryResponse qr = solrServerAtlas.query(q);
        // Guard against a response with no facet fields at all before dereferencing the
        // first one - previously qr.getFacetFields().get(0) could NPE / throw IOOBE.
        List<FacetField> facetFields = qr.getFacetFields();
        if (facetFields != null && !facetFields.isEmpty() && facetFields.get(0).getValues() != null) {
            for (FacetField.Count ffc : facetFields.get(0).getValues()) {
                geneIds.add(Integer.parseInt(ffc.getName()));
            }
        }
        log.info("Simple gene query: " + solrq.toString() + " returned " + geneIds.size() + " gene ids in " + (System.currentTimeMillis() - start) + " ms");
    } catch (SolrServerException e) {
        throw createUnexpected("Failed to fetch genes by conditions and species using query: '" + solrq.toString() + "'", e);
    }
    return geneIds;
}
/**
 * Processes a structured Atlas query end-to-end: resolves gene ids from the gene index,
 * refines them against the bit-index statistics, runs the Solr heatmap query and
 * assembles the structured result (heatmap rows, trimmed EFV columns, gene facets).
 *
 * @param query parsed structured query
 * @return matching results; may carry a user error message if the query is too complex
 */
public AtlasStructuredQueryResult doStructuredAtlasQuery(final AtlasStructuredQuery query) {
    // Flag to indicate if pvals/tstats should be retrieved from bit index and used for heatmap row ordering - for more
    // information see documentation for atlas.structured.query.max.* constants in atlas.properties
    boolean usePvalsInHeatmapOrdering = true;
    final QueryState qstate = new QueryState();
    AtlasStructuredQueryResult result = new AtlasStructuredQueryResult(query.getStart(), query.getRowsPerPage(), query.getExpsPerGene());
    // Get genes ids from genes index by gene and species query conditions
    Set<Integer> genesByGeneConditionsAndSpecies = getGenesByGeneConditionsAndSpecies(query.getGeneConditions(), query.getSpecies());
    if (query.getGeneConditions().size() > 0 && genesByGeneConditionsAndSpecies.size() == 0) {
        // if the user searched for a non-existent gene - return an empty result set
        return result;
    }
    // Now refine the gene set by retrieving the requested batch size from a list sorted by experiment counts found in bit index
    StatisticsQueryCondition statsQuery = new StatisticsQueryCondition();
    Collection<ExpFactorResultCondition> conditions = appendEfvsQuery(query, qstate, statsQuery);
    if (statsQuery.getStatisticsType() == null) {
        // no condition fixed a statistic type - default to UP_DOWN
        statsQuery.setStatisticsType(StatisticsType.UP_DOWN);
    }
    // Total number of experiment mappings across all efo terms in the query - used below
    // as one of the two complexity measures for rejecting/simplifying heavy queries
    int mappingCount = 0;
    for (EfoTree.EfoItem efoItem : qstate.getEfos().getValueOrderedList()) {
        mappingCount += atlasStatisticsQueryService.getMappingsCountForEfo(efoItem.getId());
    }
    log.debug("genes: " + genesByGeneConditionsAndSpecies.size() + "; efos: " + qstate.getEfos().getNumEfos());
    // genesByConditions is populated (out-parameter) with the page of gene ids sorted by experiment counts
    List<Integer> genesByConditions = new ArrayList<Integer>();
    Pair<Integer, Integer> counts = atlasStatisticsQueryService.getSortedBioEntities(statsQuery, query.getStart(), query.getRowsPerPage(), genesByGeneConditionsAndSpecies, genesByConditions);
    Integer numOfResults = counts.getFirst();
    Integer totalExperimentCount = counts.getSecond();
    log.info("Total efo mappings count: " + mappingCount + "; total experiment count: " + totalExperimentCount);
    // Impose restrictions on mappingCount and totalExperimentCount - for more information see documentation for
    // atlas.structured.query.max.* constants in atlas.properties
    if (mappingCount > atlasProperties.getMaxEfoMappingsCountForStructuredQuery() ||
            totalExperimentCount > atlasProperties.getMaxExperimentCountForStructuredQuery()) {
        if (query.isFullHeatmap()) { // API queries: reject outright with an explanatory message
            StringBuilder errMsg = new StringBuilder();
            errMsg.append("Atlas cannot handle this query in a timely fashion. ");
            if (mappingCount > atlasProperties.getMaxEfoMappingsCountForStructuredQuery()) {
                errMsg.append("Please try again after restricting the condition part of your query.");
                log.warn("API query too complex - efo mapping count: " + mappingCount + " > " + atlasProperties.getMaxEfoMappingsCountForStructuredQuery());
            } else {
                errMsg.append("Please try again after restricting the gene and/or condition part of your query.");
                log.warn("API query too complex - heatmap's total experiment count: " + totalExperimentCount + " > " + atlasProperties.getMaxExperimentCountForStructuredQuery());
            }
            result.setUserErrorMsg(errMsg.toString());
            return result;
        } else { // web queries: degrade gracefully by skipping pval/tstat retrieval
            usePvalsInHeatmapOrdering = false;
        }
    }
    appendGeneQuery(query.getGeneConditions(), qstate.getSolrq());
    result.setConditions(conditions);
    //TODO Ticket #3069 Solr querying code should be refactored; classes QueryState, SolrQueryBuilder only interfere
    if (!qstate.isEmpty() || !genesByConditions.isEmpty()) {
        // scoringEfos is used for deciding if an efo term in heatmap header should be made expandable
        Set<String> scoringEfos = atlasStatisticsQueryService.getScoringEfosForBioEntities(new HashSet<Integer>(genesByConditions), statsQuery.getStatisticsType());
        qstate.addScoringEfos(scoringEfos);
        try {
            controlCache();
            SolrQuery q = setupSolrQuery(query.getRowsPerPage(), qstate);
            if (qstate.isEmpty()) {
                // no conditions at all - match everything, restricted only by the id filter below
                q.setQuery("*:*");
            }
            // restrict the gene index query to the page of ids selected from the bit index
            q.addFilterQuery("id:(" + on(" ").join(genesByConditions) + ")");
            long timeStart = System.currentTimeMillis();
            QueryResponse response = solrServerAtlas.query(q);
            log.info("Solr query: " + query.getApiUrl() + ": " + qstate.toString() + " took: " + (System.currentTimeMillis() - timeStart) + " ms");
            timeStart = System.currentTimeMillis();
            processResultGenes(response, result, qstate, query, numOfResults, statsQuery, usePvalsInHeatmapOrdering);
            log.info("processResultGenes took: " + (System.currentTimeMillis() - timeStart) + " ms");
            Set<String> expandableEfs = new HashSet<String>();
            EfvTree<ColumnInfo> trimmedEfvs = trimColumns(query, result, expandableEfs);
            result.setResultEfvs(trimmedEfvs);
            result.setExpandableEfs(expandableEfs);
            if (response.getFacetFields() != null) {
                // build gene drill-down facets, excluding values the user already queried for
                for (String p : genePropService.getDrilldownProperties()) {
                    Set<String> hasVals = new HashSet<String>();
                    for (GeneQueryCondition qc : query.getGeneConditions())
                        if (qc.getFactor().equals(p))
                            hasVals.addAll(qc.getFactorValues());
                    Iterable<FacetCounter> facet = getGeneFacet(response, "property_f_" + p, hasVals);
                    if (facet.iterator().hasNext())
                        result.setGeneFacet(p, facet);
                }
                // species facet only makes sense when the user has not already restricted species
                if (!query.getSpecies().iterator().hasNext())
                    result.setGeneFacet("species", getGeneFacet(response, "species", new HashSet<String>()));
            }
        } catch (SolrServerException e) {
            log.error("Error in structured query!", e);
        }
    }
    return result;
}
/**
 * Trims factors to contain only a small number of EFVs when too many were requested;
 * for each over-threshold factor keeps the EFVs with the highest cumulative
 * up/down scores across the result rows. The user can ask to expand trimmed factors.
 *
 * @param query         query to process ("*" in expand columns, or full-heatmap mode, disables trimming)
 * @param result        result whose rows provide the per-EFV scores
 * @param expandableEfs out-parameter: factors that were trimmed and can be expanded
 * @return trimmed result EFV tree
 */
private EfvTree<ColumnInfo> trimColumns(final AtlasStructuredQuery query,
                                        final AtlasStructuredQueryResult result,
                                        Collection<String> expandableEfs) {
    final Set<String> expand = query.getExpandColumns();
    EfvTree<ColumnInfo> trimmedEfvs = new EfvTree<ColumnInfo>(result.getResultEfvs());
    if (expand.contains("*"))
        return trimmedEfvs;
    if (query.isFullHeatmap() || trimmedEfvs.getNumEfvs() < MAX_EFV_COLUMNS)
        return trimmedEfvs;
    // at least one EFV per factor survives, columns shared roughly evenly between factors
    int threshold = Math.max(1, MAX_EFV_COLUMNS / trimmedEfvs.getNumEfs());
    for (EfvTree.Ef<ColumnInfo> ef : trimmedEfvs.getEfValueSortedTree()) {
        if (expand.contains(ef.getEf()) || ef.getEfvs().size() < threshold)
            continue;
        // score each EFV of this factor by summing (count * (1 - min pval)) over all rows
        Map<EfvTree.Efv<ColumnInfo>, Double> scores = new HashMap<EfvTree.Efv<ColumnInfo>, Double>();
        for (EfvTree.Efv<ColumnInfo> efv : ef.getEfvs())
            scores.put(efv, 0.0);
        for (StructuredResultRow row : result.getResults()) {
            for (EfvTree.Efv<ColumnInfo> efv : ef.getEfvs()) {
                UpdownCounter c = row.getCounters().get(efv.getPayload().getPosition());
                scores.put(efv, scores.get(efv) + c.getDowns() * (1.0 - c.getMpvDn()) + c.getUps() * (1.0 - c.getMpvUp()));
            }
        }
        // toArray(new Map.Entry[0]) sizes the array to the map; the previous
        // new Map.Entry[1] would have produced a single null element for an empty map.
        @SuppressWarnings("unchecked")
        Map.Entry<EfvTree.Efv<ColumnInfo>, Double>[] scoreset = scores.entrySet().toArray(new Map.Entry[0]);
        // sort descending by score so the best `threshold` EFVs survive
        Arrays.sort(scoreset, new Comparator<Map.Entry<EfvTree.Efv<ColumnInfo>, Double>>() {
            public int compare(Map.Entry<EfvTree.Efv<ColumnInfo>, Double> o1, Map.Entry<EfvTree.Efv<ColumnInfo>, Double> o2) {
                return o2.getValue().compareTo(o1.getValue());
            }
        });
        for (int i = threshold; i < scoreset.length; ++i) {
            trimmedEfvs.removeEfv(ef.getEf(), scoreset[i].getKey().getEfv());
            expandableEfs.add(ef.getEf());
        }
    }
    return trimmedEfvs;
}
/**
 * Finds experiments by search string (matched against experiment id and accession).
 *
 * @param query    search string (Solr syntax fragment); empty string yields no results
 * @param condEfvs out-parameter: EFV tree filled with the "a_property_*" EFVs of matched experiments
 * @return ids of the matched experiments (at most 50)
 * @throws SolrServerException in case of any problem with SOLR
 */
private Collection<Long> findExperiments(String query, EfvTree<Boolean> condEfvs) throws SolrServerException {
    List<Long> result = new ArrayList<Long>();
    if (query.length() == 0)
        return result;
    SolrQuery q = new SolrQuery("id:(" + query + ") accession:(" + query + ")");
    q.addField("*");
    q.setRows(50);
    q.setStart(0);
    QueryResponse qr = solrServerExpt.query(q);
    for (SolrDocument doc : qr.getResults()) {
        // Check the raw field value: the previous `String.valueOf(...) != null` test could
        // never fail (String.valueOf(null) returns the literal "null"), so a document
        // without an "id" field made Long.parseLong("null") throw NumberFormatException.
        Object idValue = doc.getFieldValue("id");
        if (idValue != null) {
            result.add(Long.parseLong(String.valueOf(idValue)));
            // collect this experiment's assay property values into the condition EFV tree
            for (String name : doc.getFieldNames())
                if (name.startsWith("a_property_"))
                    for (Object val : doc.getFieldValues(name))
                        condEfvs.put(name.substring("a_property_".length()), String.valueOf(val), true);
        }
    }
    return result;
}
/**
 * Maps a query expression onto its StatisticsType equivalent.
 *
 * @param expression user-selected expression
 * @return corresponding StatisticsType
 */
private StatisticsType getStatisticsTypeForExpression(QueryExpression expression) {
    // If the user selects ANY expression, we still default to UP_DOWN, with the proviso that
    // non-de counts will be shown in heatmap (if the user select UP_DOWN, non-de counts are excluded from heatmap)
    QueryExpression effective = (expression == QueryExpression.ANY) ? QueryExpression.UP_DOWN : expression;
    return StatisticsType.valueOf(effective.toString());
}
/**
 * Appends the conditions part of the query to the query state: finds the EFVs/EFOs
 * matching each condition and registers them with {@code qstate} and {@code statsQuery}.
 * (The commented-out "TODO solrq..." lines preserve the pre-refactoring Solr query
 * assembly - see ticket reference elsewhere in this class.)
 *
 * @param query      user query
 * @param qstate     query state accumulating EFV/EFO heatmap columns
 * @param statsQuery bit-index statistics query being assembled (AND-ed OR-groups)
 * @return result conditions produced from this append, for display to the user
 */
private Collection<ExpFactorResultCondition> appendEfvsQuery(final AtlasStructuredQuery query, final QueryState qstate, StatisticsQueryCondition statsQuery) {
    final List<ExpFactorResultCondition> conds = new ArrayList<ExpFactorResultCondition>();
    // TODO SolrQueryBuilder solrq = qstate.getSolrq();
    for (ExpFactorQueryCondition c : query.getConditions()) {
        // the first condition with an expression fixes the overall statistic type
        if (statsQuery.getStatisticsType() == null) {
            statsQuery.setStatisticsType(getStatisticsTypeForExpression(c.getExpression()));
        }
        List<Attribute> orAttributes = null;
        if (c.isAnything() || c.isAnyValue()) {
            // do nothing
        } else if (c.isOnly() && !c.isAnyFactor()
                && !Constants.EFO_FACTOR_NAME.equals(c.getFactor())) {
            // "only" condition on a concrete (non-efo) factor: include the matched EFVs
            // and explicitly register all the factor's other EFVs with default expression
            try {
                EfvTree<Boolean> condEfvs = getCondEfvsForFactor(c.getFactor(), c.getFactorValues());
                EfvTree<Boolean> allEfvs = getCondEfvsAllForFactor(c.getFactor());
                if (condEfvs.getNumEfs() + allEfvs.getNumEfs() > 0) {
                    // TODO solrq.appendAnd().append("((");
                    for (EfvTree.EfEfv<Boolean> condEfv : condEfvs.getNameSortedList()) {
                        // TODO solrq.append(" ");
                        String efefvId = condEfv.getEfEfvId();
                        // TODO solrq.appendExpFields(efefvId, c.getExpression(), c.getMinExperiments());
                        // TODO solrq.appendExpScores(efefvId, c.getExpression());
                        notifyCache(efefvId + c.getExpression());
                        qstate.addEfv(condEfv.getEf(), condEfv.getEfv(), c.getMinExperiments(), c.getExpression());
                    }
                    // TODO solrq.append(")");
                    for (EfvTree.EfEfv<Boolean> allEfv : allEfvs.getNameSortedList())
                        if (!condEfvs.has(allEfv.getEf(), allEfv.getEfv())) {
                            String efefvId = allEfv.getEfEfvId();
                            // TODO solrq.append(" AND NOT (");
                            // TODO solrq.appendExpFields(efefvId, c.getExpression(), 1);
                            // TODO solrq.append(")");
                            notifyCache(efefvId + c.getExpression());
                            qstate.addEfv(allEfv.getEf(), allEfv.getEfv(), 1, QueryExpression.UP_DOWN);
                        }
                    // TODO solrq.append(")");
                    conds.add(new ExpFactorResultCondition(c,
                            Collections.<List<AtlasEfoService.EfoTermCount>>emptyList(),
                            false));
                }
            } catch (SolrServerException e) {
                log.error("Error querying Atlas index", e);
            }
        } else {
            // general case: resolve the condition into matching EFVs/EFOs and collect them
            // as an OR-group of statistics attributes
            orAttributes = new ArrayList<Attribute>();
            try {
                boolean nonemptyQuery = false;
                EfvTree<Boolean> condEfvs = getConditionEfvs(c);
                if (condEfvs.getNumEfs() > 0) {
                    // If the number of efv/efo conditions matching user's query exceeds MAX_EFV_COLUMNS, we don't
                    // search with/show efos.
                    // Note that efos are also not shown on heatmap for gene condition-only queries,
                    // except that only DAS ef's are shown in the heatmap with three efvs per factor.
                    // If user's query does contain efv/efo conditions, all matching efvs are searched for/included in heatmap,
                    // and though heatmap is trimmed to max MAX_EFV_COLUMNS the user can click on 'expand' link under
                    // each factor to see all efvs.
                    List<EfvTree.EfEfv<Boolean>> conditions = condEfvs.getNameSortedList();
                    boolean excludeEfos = false;
                    int totalConditions = conditions.size();
                    log.info("User query matched: " + totalConditions + " efv/efo conditions");
                    if (totalConditions > MAX_EFV_COLUMNS) {
                        excludeEfos = true;
                    }
                    for (EfvTree.EfEfv<Boolean> condEfv : conditions) {
                        String efefvId = condEfv.getEfEfvId();
                        notifyCache(efefvId + c.getExpression());
                        Attribute attribute;
                        // If ef key equals EFO_WITH_CHILDREN_PREAMBLE (c.f. getCondEfvsForFactor()), set
                        // includeEfoChildren flag for condEfv.getEfv() efo term.
                        String ef = condEfv.getEf();
                        // includeEfoDescendantGeneration == 0 ==> don't include any children
                        // includeEfoDescendantGeneration == 1 ==> include immediate children only
                        // includeEfoDescendantGeneration == 2 ==> include immediate children and grandchildren only
                        // ...
                        // includeEfoDescendantGeneration == Integer.MAX_VALUE ==> include all descendants recursively
                        // For List view and for API queries, include all children recursively; otherwise (i.e. for heatmap web queries).
                        // always include immediate children and grandchildren only
                        int maxEfoDescendantGeneration = (query.getViewType() == ViewType.LIST || query.isFullHeatmap() ? Integer.MAX_VALUE : 2);
                        if (Constants.EFO_FACTOR_NAME.equals(ef) || Constants.EFO_WITH_CHILDREN_PREAMBLE.equals(ef)) {
                            if (!excludeEfos) {
                                qstate.addEfo(condEfv.getEfv(), c.getMinExperiments(), c.getExpression(), maxEfoDescendantGeneration);
                                attribute = new EfoAttribute(condEfv.getEfv(), getStatisticsTypeForExpression(c.getExpression()));
                                orAttributes.add(attribute);
                            }
                        } else {
                            qstate.addEfv(condEfv.getEf(), condEfv.getEfv(), c.getMinExperiments(), c.getExpression());
                            attribute = new EfvAttribute(condEfv.getEf(), condEfv.getEfv(), getStatisticsTypeForExpression(c.getExpression()));
                            orAttributes.add(attribute);
                        }
                    }
                    nonemptyQuery = true;
                }
                // collect efo parent paths for display alongside the condition
                Collection<List<AtlasEfoService.EfoTermCount>> efoPaths = new ArrayList<List<AtlasEfoService.EfoTermCount>>();
                Collection<EfvTree.Efv<Boolean>> condEfos = condEfvs.getEfvs(Constants.EFO_FACTOR_NAME);
                for (EfvTree.Efv<Boolean> efv : condEfos) {
                    efoPaths.addAll(efoService.getTermParentPaths(efv.getEfv()));
                }
                conds.add(new ExpFactorResultCondition(c, efoPaths, !nonemptyQuery));
            } catch (SolrServerException e) {
                log.error("Error querying Atlas index", e);
            }
        }
        if (orAttributes != null) {
            statsQuery.and(atlasStatisticsQueryService.getStatisticsOrQuery(orAttributes, c.getMinExperiments()));
            log.debug("Adding the following " + orAttributes.size() + " attributes to stats query: " + orAttributes);
        }
    }
    return conds;
}
/**
 * Appends the gene part of the query: parses each gene condition and appends the
 * corresponding clause to the Solr query string. Conditions whose factor matches
 * none of the known property groups are silently skipped.
 *
 * @param geneConditions gene conditions from the user's query
 * @param solrq          solr query builder being appended to
 */
private void appendGeneQuery(Collection<GeneQueryCondition> geneConditions, SolrQueryBuilder solrq) {
    for (GeneQueryCondition geneQuery : geneConditions) {
        String escapedQ = geneQuery.getSolrEscapedFactorValues();
        if (geneQuery.isAnyFactor()) {
            // "any factor": match against name, species, identifier, id and every
            // id/name/description gene property
            solrq.appendAnd();
            if (geneQuery.isNegated())
                solrq.append(" NOT ");
            solrq.append("(name:(").append(escapedQ).append(") species:(").append(escapedQ)
                    .append(") identifier:(").append(escapedQ).append(") id:(").append(escapedQ).append(")");
            for (String p : genePropService.getIdNameDescProperties())
                solrq.append(" property_").append(p).append(":(").append(escapedQ).append(")");
            solrq.append(") ");
        } else if (Constants.GENE_PROPERTY_NAME.equals(geneQuery.getFactor())) {
            // gene "name" pseudo-factor: match name, identifier, id and name-like properties
            solrq.appendAnd();
            if (geneQuery.isNegated())
                solrq.append(" NOT ");
            solrq.append("(name:(").append(escapedQ).append(") ");
            solrq.append("identifier:(").append(escapedQ).append(") ");
            solrq.append("id:(").append(escapedQ).append(") ");
            for (String nameProp : genePropService.getNameProperties())
                solrq.append("property_" + nameProp + ":(").append(escapedQ).append(") ");
            solrq.append(")");
        } else if (genePropService.getDescProperties().contains(geneQuery.getFactor())
                || genePropService.getIdProperties().contains(geneQuery.getFactor())) {
            // a concrete known gene property: match only its property_* field
            solrq.appendAnd();
            if (geneQuery.isNegated())
                solrq.append(" NOT ");
            String field = "property_" + geneQuery.getFactor();
            solrq.append(field).append(":(").append(escapedQ).append(")");
        }
    }
}
/**
 * Appends the species part of the query: each requested species string is matched
 * (case-insensitive substring) against the known species options, and the matched
 * canonical names are added as a single "species:(...)" clause.
 *
 * @param speciesConditions requested species search strings
 * @param solrq             solr query builder being appended to
 */
private void appendSpeciesQuery(Collection<String> speciesConditions, SolrQueryBuilder solrq) {
    Set<String> matchedSpecies = new HashSet<String>();
    for (String requested : speciesConditions) {
        String requestedLower = requested.toLowerCase();
        for (String known : getSpeciesOptions()) {
            if (known.toLowerCase().contains(requestedLower)) {
                matchedSpecies.add(known);
            }
        }
    }
    if (!matchedSpecies.isEmpty()) {
        solrq.appendAnd().append("species:(").append(EscapeUtil.escapeSolrValueList(matchedSpecies)).append(")");
    }
}
/**
 * Returns the tree of EFOs/EFVs matching one query condition. EFOs are stored under
 * the "magic" factor named "efo" at this point; they are moved to the EfoTree later.
 * Dispatches to the appropriate specific lookup depending on the condition's shape.
 *
 * @param c condition
 * @return tree of EFVs/EFO
 * @throws SolrServerException in case of any problems with SOLR
 */
private EfvTree<Boolean> getConditionEfvs(QueryCondition c) throws SolrServerException {
    if (c.isAnyValue()) {
        // any value of a concrete factor: take everything known for that factor
        return getCondEfvsAllForFactor(c.getFactor());
    } else if (c.isAnyFactor()) {
        // concrete values searched across all factors
        return getCondEfvsForFactor(null, c.getFactorValues());
    } else {
        // concrete factor, concrete values
        return getCondEfvsForFactor(c.getFactor(), c.getFactorValues());
    }
}
/**
 * Returns all EFVs (or, for the "efo" pseudo-factor, the EFO root terms) for the
 * given factor, capped at MAX_EFV_COLUMNS entries.
 *
 * @param factor factor name (or Constants.EFO_FACTOR_NAME for efo roots)
 * @return tree of EFVs/EFO
 */
private EfvTree<Boolean> getCondEfvsAllForFactor(String factor) {
    EfvTree<Boolean> condEfvs = new EfvTree<Boolean>();
    int added = 0;
    if (Constants.EFO_FACTOR_NAME.equals(factor)) {
        // "efo" pseudo-factor: seed the tree with the EFO root term ids
        for (String rootId : getEfo().getRootIds()) {
            condEfvs.put(Constants.EFO_FACTOR_NAME, rootId, true);
            if (++added >= MAX_EFV_COLUMNS) {
                break;
            }
        }
    } else {
        for (String value : efvService.listAllValues(factor)) {
            condEfvs.put(factor, value, true);
            if (++added >= MAX_EFV_COLUMNS) {
                break;
            }
        }
    }
    return condEfvs;
}
/**
 * Returns the EFVs/EFOs matching the given values for the given factor.
 * For the "efo" pseudo-factor (or a null factor), the values are resolved against the
 * EFO ontology; a value prefixed with EFO_WITH_CHILDREN_PREAMBLE is stored under that
 * preamble key so that its descendants are included later. For concrete factors (or a
 * null factor) the values are also matched against the properties Solr index.
 *
 * @param factor factor name; null means "any factor" (search both EFO and properties)
 * @param values values search strings
 * @return tree of EFVs/EFO
 * @throws SolrServerException in case of any problems with SOLR
 */
private EfvTree<Boolean> getCondEfvsForFactor(final String factor, final Iterable<String> values) throws SolrServerException {
    EfvTree<Boolean> condEfvs = new EfvTree<Boolean>();
    if (Constants.EFO_FACTOR_NAME.equals(factor) || null == factor) {
        Efo efo = getEfo();
        for (String v : values) {
            String efKey = Constants.EFO_FACTOR_NAME;
            // If v (efo id) is pre-ambled with EFO_WITH_CHILDREN_PREAMBLE, flag it in condEfvs for inclusion
            // of children by using EFO_WITH_CHILDREN_PREAMBLE as the key pointing to the EfoTerm corresponding to v
            if (v.startsWith(Constants.EFO_WITH_CHILDREN_PREAMBLE)) {
                efKey = Constants.EFO_WITH_CHILDREN_PREAMBLE;
                v = v.substring(Constants.EFO_WITH_CHILDREN_PREAMBLE.length());
            }
            for (EfoTerm term : efo.searchTerm(EscapeUtil.escapeSolr(v))) {
                condEfvs.put(efKey, term.getId(), true);
            }
        }
    }
    // a pure "efo" query never consults the properties index
    if (Constants.EFO_FACTOR_NAME.equals(factor))
        return condEfvs;
    String queryString = EscapeUtil.escapeSolrValueList(values);
    if (factor != null)
        queryString = "(" + queryString + ") AND property:" + EscapeUtil.escapeSolr(factor);
    SolrQuery q = new SolrQuery(queryString);
    q.setRows(10000);
    q.setStart(0);
    q.setFields("*");
    QueryResponse qr = solrServerProp.query(q);
    for (SolrDocument doc : qr.getResults()) {
        String ef = (String) doc.getFieldValue("property");
        String efv = (String) doc.getFieldValue("value");
        condEfvs.put(ef, efv, true);
    }
    return condEfvs;
}
/**
 * Builds a fresh local cache used to avoid re-loading bit stats for a given efo/efv
 * term across consecutive heat map rows.
 *
 * @return Map: stat type -> Map: efo/efv -> Multiset&lt;Integer&gt; of aggregate scores for gene indexes
 */
public Map<StatisticsType, HashMap<String, Multiset<Integer>>> getScoresCache() {
    Map<StatisticsType, HashMap<String, Multiset<Integer>>> statTypeToEfoToScores
            = new HashMap<StatisticsType, HashMap<String, Multiset<Integer>>>();
    // only UP, DOWN and NON_D_E counts are ever cached
    StatisticsType[] cachedTypes = {StatisticsType.UP, StatisticsType.DOWN, StatisticsType.NON_D_E};
    for (StatisticsType statisticsType : cachedTypes) {
        statTypeToEfoToScores.put(statisticsType, new HashMap<String, Multiset<Integer>>());
    }
    return statTypeToEfoToScores;
}
/**
 * Looks up cached aggregate scores.
 *
 * @param scoresCache cache built by {@link #getScoresCache()}; may be null
 * @param statType    statistic type key
 * @param efoOrEfv    efo/efv key
 * @return cached Multiset of aggregate scores for gene indexes, or null on a miss
 */
public Multiset<Integer> getScoresFromCache(
        Map<StatisticsType, HashMap<String, Multiset<Integer>>> scoresCache,
        StatisticsType statType,
        String efoOrEfv) {
    if (scoresCache == null) {
        return null;
    }
    return scoresCache.get(statType).get(efoOrEfv);
}
/**
 * Returns the experiment count for the given attribute and bio-entity, consulting the
 * local scores cache first and delegating to the statistics service on a miss (which
 * also populates the cache for subsequent rows).
 *
 * @param scoresCache               cache of experiment counts per stat type and efo/efv
 * @param attribute                 attribute (carries stat type and efo/efv value)
 * @param bioEntityId               gene of interest
 * @param bioEntityIdRestrictionSet all genes of interest for the current query
 * @return experiment count for the attribute's statType/value and bioEntityId
 */
private int getExperimentCountsForBioEntity(
        Map<StatisticsType, HashMap<String, Multiset<Integer>>> scoresCache,
        Attribute attribute,
        Integer bioEntityId,
        Set<Integer> bioEntityIdRestrictionSet) {
    Multiset<Integer> cachedScores = getScoresFromCache(scoresCache, attribute.getStatType(), attribute.getValue());
    if (cachedScores == null) {
        // cache miss - delegate to the stats service (which also fills scoresCache)
        return atlasStatisticsQueryService.getExperimentCountsForBioEntity(attribute, bioEntityId, bioEntityIdRestrictionSet, scoresCache);
    }
    return cachedScores.count(bioEntityId);
}
/**
 * Finds all efv attributes for which at least one gene in bioEntityIdRestrictionSet has
 * experiment counts of the given statistic type, and adds them to the QueryState
 * (capped per factor unless a full heatmap was requested). Used when the user supplied
 * no efv/efo conditions - c.f. the call in processResultGenes().
 *
 * @param bioEntityIdRestrictionSet gene set of interest
 * @param autoFactors               experimental factors eligible for the heatmap
 * @param qstate                    QueryState receiving the scoring efvs
 * @param statisticType             statistic type chosen in the simple query screen
 * @param isFullHeatMap             if true, no per-factor cap is applied
 */
private void populateScoringAttributes(
        final Set<Integer> bioEntityIdRestrictionSet,
        final Collection<String> autoFactors,
        QueryState qstate,
        StatisticsType statisticType,
        boolean isFullHeatMap
) {
    // attributes arrive sorted descending by experiment count, so the cap below keeps the best ones
    List<Multiset.Entry<EfvAttribute>> scoringAttrCounts =
            atlasStatisticsQueryService.getScoringAttributesForBioEntities(bioEntityIdRestrictionSet, statisticType, autoFactors);
    Multiset<EfvAttribute> perEfSelected = HashMultiset.create();
    for (Multiset.Entry<EfvAttribute> entry : scoringAttrCounts) {
        EfvAttribute candidate = entry.getElement();
        // skip factors outside the allowed set and attributes without a concrete efv
        if (!autoFactors.contains(candidate.getEf()) || candidate.getEfv() == null || candidate.getEfv().isEmpty()) {
            continue;
        }
        EfvAttribute efKey = new EfvAttribute(candidate.getEf(), null);
        // restrict the amount of efvs shown for each ef to max atlasProperties.getMaxEfvsPerEfInHeatmap()
        if (!isFullHeatMap && perEfSelected.count(efKey) >= atlasProperties.getMaxEfvsPerEfInHeatmap()) {
            continue;
        }
        qstate.addEfv(candidate.getEf(), candidate.getEfv(), 1, QueryExpression.valueOf(statisticType.toString()));
        perEfSelected.add(efKey);
    }
}
/**
 * Extracts the gene ids from the given Solr gene-index documents. The returned set
 * serves as the gene restriction list for subsequent StatisticsStorage queries.
 *
 * @param docs Solr documents from the gene index
 * @return set of gene ids present in the documents
 */
private Set<Integer> getBioEntityIdRestrictionSet(SolrDocumentList docs) {
    Set<Integer> bioEntityRestrictionSet = new HashSet<Integer>();
    for (SolrDocument doc : docs) {
        Object idValue = doc.getFieldValue("id");
        if (idValue == null) {
            continue; // documents without an id contribute nothing
        }
        bioEntityRestrictionSet.add((Integer) idValue);
    }
    return bioEntityRestrictionSet;
}
/**
 * Computes up/down/non-d.e. statistics for one heatmap cell: the experiment counts for
 * the given gene and efo/efv attribute, restricted to bioEntityIdRestrictionSet, plus
 * (optionally) the best up/down p-values for ordering.
 *
 * @param scoresCache               cache that stores experiment counts per stat type and efo/efv;
 *                                  populated on a miss by the delegated stats-service call
 * @param anAttribute               efo/efv attribute; queried with each needed stat type in turn
 * @param bioEntityId               gene of interest
 * @param bioEntityIdRestrictionSet all genes of interest for the current query (including bioEntityId)
 * @param showNonDEData             if true, also retrieve the non-differentially-expressed count
 * @param usePvalsInHeatmapOrdering if true, retrieve best up/down p-values from the bit index
 * @return up/dn/non-d.e. counts and best p-values wrapped in an UpdownCounter
 */
public UpdownCounter getStats(
        Map<StatisticsType, HashMap<String, Multiset<Integer>>> scoresCache,
        Attribute anAttribute,
        Integer bioEntityId,
        Set<Integer> bioEntityIdRestrictionSet,
        boolean showNonDEData,
        boolean usePvalsInHeatmapOrdering
) {
    Attribute attribute = anAttribute.withStatType(StatisticsType.UP);
    int upCnt = getExperimentCountsForBioEntity(scoresCache, attribute, bioEntityId, bioEntityIdRestrictionSet);
    attribute = anAttribute.withStatType(StatisticsType.DOWN);
    int downCnt = getExperimentCountsForBioEntity(scoresCache, attribute, bioEntityId, bioEntityIdRestrictionSet);
    int nonDECnt = 0;
    if (showNonDEData) {
        attribute = anAttribute.withStatType(StatisticsType.NON_D_E);
        nonDECnt = getExperimentCountsForBioEntity(scoresCache, attribute, bioEntityId, bioEntityIdRestrictionSet);
    }
    // p-values default to 0 when p-value ordering is disabled, 1 ("worst") when enabled
    float minPValUp = 0;
    float minPValDown = 0;
    if (usePvalsInHeatmapOrdering) {
        minPValUp = 1;
        minPValDown = 1;
        long start = System.currentTimeMillis();
        if (upCnt > 0) {
            // Get best up pValue: top-1 experiment when sorted by pval/tstat rank
            attribute = anAttribute.withStatType(StatisticsType.UP);
            List<ExperimentResult> bestUpExperimentsForAttribute = atlasStatisticsQueryService.getExperimentsSortedByPvalueTRank(bioEntityId, attribute, 0, 1);
            if (bestUpExperimentsForAttribute.isEmpty()) {
                // a positive UP count guarantees at least one experiment - anything else is an index inconsistency
                throw LogUtil.createUnexpected("Failed to retrieve best UP experiment for geneId: " + bioEntityId + "); attr: " + attribute + " despite the UP count: " + upCnt);
            }
            minPValUp = bestUpExperimentsForAttribute.get(0).getPValTStatRank().getPValue();
        }
        if (downCnt > 0) {
            // Get best down pValue
            attribute = anAttribute.withStatType(StatisticsType.DOWN);
            List<ExperimentResult> bestDownExperimentsForAttribute = atlasStatisticsQueryService.getExperimentsSortedByPvalueTRank(bioEntityId, attribute, 0, 1);
            if (bestDownExperimentsForAttribute.isEmpty()) {
                throw LogUtil.createUnexpected("Failed to retrieve best DOWN experiment for geneId: " + bioEntityId + "; attr: " + attribute + " despite the DOWN count: " + downCnt);
            }
            minPValDown = bestDownExperimentsForAttribute.get(0).getPValTStatRank().getPValue();
        }
        if (minPValUp != 1 || minPValDown != 1)
            log.debug("Retrieved best UP & DOWN pVals: (" + minPValUp + " : " + minPValDown + ") for geneId: " + bioEntityId + "; attr: " + attribute +
                    "' in: " + (System.currentTimeMillis() - start) + " ms");
    }
    return new UpdownCounter(
            upCnt,
            downCnt,
            nonDECnt,
            minPValUp,
            minPValDown);
}
/**
* Processes SOLR query response and generates Atlas structured query result
*
* @param response SOLR response
* @param result ATlas result
* @param qstate query state
* @param query query itself
* @param numOfResults
* @param statisticsQuery specified in user's query (if the user has not chosen any efv/efo conditions,
* the statistics type in this query will be used to find out scoring Attributes for that statistic type)
* @param usePvalsInHeatmapOrdering if true, retrieve pval/tstats from bit index; otherwise don't.
* @throws SolrServerException
*/
private void processResultGenes(QueryResponse response,
AtlasStructuredQueryResult result,
QueryState qstate,
AtlasStructuredQuery query,
Integer numOfResults,
StatisticsQueryCondition statisticsQuery,
boolean usePvalsInHeatmapOrdering
) throws SolrServerException {
// Note that this method processes results from the query assembled from an already sorted list of
// gene id () got from an earlier atlasStatisticsQueryService.getSortedGenes() call). However, by default Solr
// returns its results in the order specified by its notion of relevancy, and I don't know of any way of
// forcing Solr to return results in the order of explicit field values in the query.
// In effect, the earlier sorting itself is lost, though it still has a crucial function of identifying
// the required chunk from the overall sorted list of genes for the user's query.
// To re-instate the sorting we enter the newly constructed StructuredResultRows into the TreeSet below (ordered
// according to compareTo() method in StructuredResultRow)
SortedSet<StructuredResultRow> structuredResultRows = new TreeSet<StructuredResultRow>();
// The list below stores list of StructuredResultRow's until all the (efo and efv) counters have been added
// to each row. Only after that is this list sorted by the cumulative experiment counts in each row.
List<StructuredResultRow> unsortedHeatmapRows = new ArrayList<StructuredResultRow>();
// Initialise scores cache to store efo counts for the group of genes of interest to this query.
// For each heat map row other than the first, the cache will be hit instead of AtlasStatisticsQueryService
Map<StatisticsType, HashMap<String, Multiset<Integer>>> scoresCache = getScoresCache();
SolrDocumentList docs = response.getResults();
EfvTree<ColumnInfo> resultEfvs = new EfvTree<ColumnInfo>();
EfoTree<ColumnInfo> resultEfos = qstate.getEfos();
Iterable<EfvTree.EfEfv<ColumnInfo>> efvList = qstate.getEfvs().getValueSortedList();
Iterable<EfoTree.EfoItem<ColumnInfo>> efoList = qstate.getEfos().getValueOrderedList();
boolean hasQueryEfoEfvs = qstate.hasQueryEfoEfvs();
// heatmap column numberer
Maker<ColumnInfo> numberer = new Maker<ColumnInfo>() {
private int num = 0;
public ColumnInfo make() {
return new BaseColumnInfo(num++);
}
};
Collection<String> autoFactors;
if ((!query.getConditions().isEmpty() && !query.getConditions().iterator().next().isAnything())
|| query.isFullHeatmap()) {
autoFactors = efvService.getAllFactors();
} else {
// If the user hasn't specified any conditions or query.isFullHeatmap() is false (the default for heatmap),
// choose only 'usual factors of interest' - as shown in GXA DAS source
autoFactors = atlasProperties.getDasFactors();
}
// timing collection variables
long overallBitStatsProcessingTime = 0;
long overallNcdfAccessTimeForListView = 0;
long overallBitStatsProcessingTimeForListView = 0;
// Retrieve from docs the gene restriction list to be used in subsequent StatisticsStorage queries.
Set<Integer> bioEntityIdRestrictionSet = getBioEntityIdRestrictionSet(docs);
if (!hasQueryEfoEfvs) {
long timeStart = System.currentTimeMillis();
populateScoringAttributes(bioEntityIdRestrictionSet, autoFactors, qstate, statisticsQuery.getStatisticsType(), query.isFullHeatmap());
long diff = System.currentTimeMillis() - timeStart;
overallBitStatsProcessingTime += diff;
List<EfvTree.EfEfv<ColumnInfo>> scoringEfvs = qstate.getEfvs().getValueSortedList();
log.info("User query contained no efv/efo conditions; collected " + scoringEfvs.size() + " scoring efv conditions in " + diff + " ms");
efvList = scoringEfvs;
}
// This map stores HeatMapColumn object for each EfvTree.EfEfv processed in the loop below.
// Note that HeatMapColumn contains a list of counters that is a vertical slice through heatmap, specific
// to that EfvTree.EfEfv. HeatMapColumn objects wll be used to sort efv heatmap columns by their cumulative
// experiment counts before adding appropriate counters to the horizontal slices (per gene) displayed in heatmap,
// i.e. StructuredResultRow's in unsortedHeatmapRows.
Map<EfvTree.EfEfv<ColumnInfo>, HeatMapColumn> efvToColumn = new HashMap<EfvTree.EfEfv<ColumnInfo>, HeatMapColumn>();
log.debug("Processing " + numOfResults + " result bioentities...");
result.setTotal(numOfResults);
int added = 0;
for (SolrDocument doc : docs) {
long hmRowStart = System.currentTimeMillis();
long overallBitStatsProcessingTimeForHeatMapRow = 0;
Object idObj = doc.getFieldValue("id");
if (idObj == null) {
continue;
}
Integer bioEntityId = (Integer) idObj;
AtlasGene gene = new AtlasGene(doc);
if (response.getHighlighting() != null)
gene.setGeneHighlights(response.getHighlighting().get(bioEntityId.toString()));
// counters will contain a list of heatmap cell payloads
List<UpdownCounter> efoCounters = new ArrayList<UpdownCounter>() {
@Override
public UpdownCounter get(int index) {
if (index < size())
return super.get(index);
else
return new UpdownCounter(0, 0, 0, 0, 0);
}
};
// This variable will determine if a StructuredResultRow will be displayed;
// it is set to true if at least one cell has counts greater than minExperiments - used in StructuredResultRow constructor.
boolean rowQualifies = false;
// Now calculate up/dn/nonde counts for all efvs/efos for the current gene
Iterator<EfvTree.EfEfv<ColumnInfo>> itEfv = efvList.iterator();
Iterator<EfoTree.EfoItem<ColumnInfo>> itEfo = efoList.iterator();
// attrToCounter is used to construct list view and stores mapping between attributes derived from processed efo terms and
// their corresponding statistics counters
Map<EfvAttribute, UpdownCounter> attrToCounter = new HashMap<EfvAttribute, UpdownCounter>();
EfvTree.EfEfv<ColumnInfo> efEfv = null;
EfoTree.EfoItem<ColumnInfo> efoItem = null;
while (itEfv.hasNext() || itEfo.hasNext() || efEfv != null || efoItem != null) {
if (itEfv.hasNext() && efEfv == null) {
efEfv = itEfv.next();
}
if (itEfo.hasNext() && efoItem == null) {
efoItem = itEfo.next();
}
UpdownCounter counter;
boolean usingEfv = efoItem == null || (efEfv != null && efEfv.getPayload().compareTo(efoItem.getPayload()) < 0);
if (usingEfv) {
String ef = efEfv.getEf();
String efv = efEfv.getEfv();
EfvAttribute attr = new EfvAttribute(ef, efv, null);
if (!attrToCounter.containsKey(attr)) {
// 1. In the list view: the above test prevents querying bit index for the same attribute more than once,
// e.g. when an efo is also processed (and thus broken down into efvs it maps to - c.f. below)
// for the current gene that maps to that attribute
// 2. In the heatmap view, the use of attrToCounter is not essential, but it is innocuous
long timeStart = System.currentTimeMillis();
counter = getStats(scoresCache, attr, bioEntityId, bioEntityIdRestrictionSet, ((QueryColumnInfo) efEfv.getPayload()).displayNonDECounts(), usePvalsInHeatmapOrdering);
long diff = System.currentTimeMillis() - timeStart;
overallBitStatsProcessingTime += diff;
overallBitStatsProcessingTimeForHeatMapRow += diff;
attrToCounter.put(attr, counter);
}
if (!efvToColumn.containsKey(efEfv)) {
efvToColumn.put(efEfv, new HeatMapColumn(efEfv));
}
efvToColumn.get(efEfv).addRowCounter(attrToCounter.get(attr));
if (efEfv.getPayload().isQualified(attrToCounter.get(attr))) {
rowQualifies = true;
efvToColumn.get(efEfv).setQualifies(true);
} else { // Counter for attr doesn't qualify
if (query.getViewType() == ViewType.LIST) {
// In list view we display gene-ef-efv rows, hence if a given ef-efv counter doesn't qualify
// remove it from the rows to be displayed for the current gene
attrToCounter.remove(attr);
}
}
efEfv = null;
} else {
String efoTerm = efoItem.getId();
if (query.getViewType() == ViewType.LIST) { // efo's in list view
Set<EfvAttribute> attrsForEfo = atlasStatisticsQueryService.getAttributesForEfo(efoTerm);
long timeStart = System.currentTimeMillis();
for (EfvAttribute attr : attrsForEfo) {
if (!attrToCounter.containsKey(attr)) {
// the above test prevents querying bit index for the same attribute more than once - if more
// than one efo processed here maps to that attribute (e.g. an efo's term and its parent)
counter = getStats(scoresCache, attr, bioEntityId, bioEntityIdRestrictionSet, ((QueryColumnInfo) efoItem.getPayload()).displayNonDECounts(), usePvalsInHeatmapOrdering);
if (efoItem.getPayload().isQualified(counter)) {
rowQualifies = true;
attrToCounter.put(attr, counter);
}
}
}
long diff = System.currentTimeMillis() - timeStart;
overallBitStatsProcessingTime += diff;
overallBitStatsProcessingTimeForListView += diff;
} else { // efo's in heatmap
// Get statistics for efoTerm-gene
long timeStart = System.currentTimeMillis();
// third param is not important below in getStats() - as we get counts for all stat types anyway
Attribute attr = new EfoAttribute(efoTerm, null);
counter = getStats(scoresCache, attr, bioEntityId, bioEntityIdRestrictionSet, ((QueryColumnInfo) efoItem.getPayload()).displayNonDECounts(), usePvalsInHeatmapOrdering);
long diff = System.currentTimeMillis() - timeStart;
overallBitStatsProcessingTime += diff;
overallBitStatsProcessingTimeForHeatMapRow += diff;
if (!resultEfos.getPayload(efoTerm).isPositionSet()) {
// If the final heatmap column position has not yet been set (e.g. while processing
// a previous gene in the main loop), set it now
resultEfos.setPayload(efoTerm, numberer.make());
}
// Accumulate efo counters
efoCounters.add(counter);
if (efoItem.getPayload().isQualified(counter)) {
rowQualifies = true;
// Mark efo for displaying in heatmap if the experiment counts in this cell qualify it
resultEfos.mark(efoItem.getId(), !INCLUDE_EFO_PARENTS_IN_HEATMAP);
// Tag efoItem as non-expandable in heatmap header - if applicable
resultEfos.setNonExpandableIfApplicable(efoItem.getId(), qstate.getScoringEfos());
} else {
log.debug("Rejecting " + efoItem.getId() + " for bioentity " + bioEntityId + " as score does not satisfy min experiments condition");
}
}
efoItem = null;
}
}
// Store a Structured row (with just efo counters in it for now) in unsortedHeatmapRows. Efv counters will be added
// below once the efv columns have been sorted by their cumulative experiment counts.
/** TODO
* In advanced (though never simple) i/f queries, heatmap columns will correspond to ef-efv with different expressions in the user's query.
* For example, in the case of 'UP in heart and DOWN in lung' query, the required expression for a group of heart-related efv columns
* will be UP, and the required expression for another group of lung-related columns will be DOWN.
* In such cases, ef-efv-specific StatisticsType(s) should be passed to StructuredResultRow constructor. Then the sorting of unsortedHeatmapRows
* in the final heatmap would truly reflect the user's query. As things stand now, a simplification has been made that only
* the first clause's stat type (stored in statisticsQuery.getStatisticsType()) is passed to StructuredResultRow constructor. Consequently,
* unsortedHeatmapRows are currently sorted by the aggregate counts corresponding to statistics type of the first AND clause only.
*/
unsortedHeatmapRows.add(new StructuredResultRow(gene, efoCounters, rowQualifies, statisticsQuery.getStatisticsType()));
// Now process for list view all attributes in attrToCounter (mapped to by efo's processed above)
if (query.getViewType() == ViewType.LIST) {
for (Map.Entry<EfvAttribute, UpdownCounter> entry : attrToCounter.entrySet()) {
final EfvAttribute attribute = entry.getKey();
boolean displayNonDECounts = entry.getValue().getNones() > 0;
Pair<Long, Long> queryTimes = loadListExperiments(result, gene, attribute.getEf(), attribute.getEfv(), entry.getValue(), qstate.getExperiments(), displayNonDECounts);
overallBitStatsProcessingTime += queryTimes.getFirst();
overallBitStatsProcessingTimeForListView += queryTimes.getFirst();
overallNcdfAccessTimeForListView += queryTimes.getSecond();
}
}
log.debug("Processed gene: " + gene.getGeneName() + " in: " + (System.currentTimeMillis() - hmRowStart) + "; bit stats time: " + overallBitStatsProcessingTimeForHeatMapRow);
}
// So far we accumulated rows of counters for all efos into unsortedHeatmapRows
// We have also accumulated Efv columns data in efvToColumn
// What we need to do now is the following:
// 1. Eliminate columns that don't qualify to be displayed in heatmap
// 2. Sort HeatMapColumns in efvToColumn.values() according to the cumulative column counter and
// 3. Transfer efv counters to rows in unsortedHeatmapRows and transfer efvs to resultEfvs, now with the correct sorted
// column positions as payloads
// Remove non-qualifying columns
Collection<HeatMapColumn> qualifyingColumns = Collections2.filter(efvToColumn.values(),
new Predicate<HeatMapColumn>() {
public boolean apply(HeatMapColumn col) {
return col.qualifies();
}
});
List<HeatMapColumn> efvColumns = new ArrayList<HeatMapColumn>(qualifyingColumns);
// Sort efv columns by their cumulative experiment counts in each column
Collections.sort(efvColumns);
// Transfer efv counters to rows in unsortedHeatmapRows and transfer efvs to resultEfvs, now with the correct sorted
// column positions as payloads.
for (HeatMapColumn hmColumn : efvColumns) {
EfvTree.EfEfv<ColumnInfo> efEfv = hmColumn.getEfEfv();
efEfv.setPayload(numberer.make()); // Set position for each efv column after the above sort
int row = 0;
for (StructuredResultRow structuredRow : unsortedHeatmapRows) {
UpdownCounter counter = hmColumn.getRowCounter(row++);
structuredRow.addCounter(counter);
if (efEfv.getPayload().isQualified(counter)) {
resultEfvs.put(efEfv);
}
}
}
// Now that unsortedHeatmapRows has both efo and efv data, where:
// efo columns are sorted by the order in which they were entered into QueryState
// efv columns are sorted by their cumulative column experiment counts (efv's with highest experiment counts moving to the left)
// - we can transfer unsortedHeatmapRows to structuredResultRows. Because the latter is a TreeSet, heatmap rows
// will now be sorted according to their aggregated row counts, with genes with highest experiment counts moving
// to the top of the heatmap
structuredResultRows.addAll(unsortedHeatmapRows);
log.debug("structuredResultRows.size() = " + structuredResultRows.size() + "; added = " + added);
// Returned results sorted by geneScore, eliminating rows that had zero qualifying score (i.e. all the scores added for
// all efvs where the counts were >= min experiments)
for (StructuredResultRow row : structuredResultRows) {
if (row.qualifies()) {
result.addResult(row);
} else {
log.info("Excluding from heatmap row for gene: " + row.getGene().getGeneName());
}
}
log.info("Overall bitstats processing time: " + overallBitStatsProcessingTime + " ms");
if (query.getViewType() == ViewType.LIST) {
log.info("Overall listview-related bitstats processing time: " + overallBitStatsProcessingTimeForListView + " ms");
log.info("Overall listview-related ncdf querying time: " + overallNcdfAccessTimeForListView + " ms");
}
result.setResultEfvs(resultEfvs);
result.setResultEfos(resultEfos);
log.info("Retrieved query completely: " + result.getSize() + " records of " +
result.getTotal() + " total starting from " + result.getStart());
log.debug("Resulting EFVs are: " + resultEfvs.getNameSortedList().size());
log.debug("Resulting EFOs are: " + resultEfos.getMarkedSubTreeList().size());
}
/**
 * Loads experiment data for the list view, where each list row corresponds to a single gene-ef-efv combination
 * and each gene can have at most result.getRowsPerGene() list rows.
 *
 * @param result        atlas query result that the assembled ListResultRow (if any) is added to
 * @param gene          gene whose ef-efv experiment data is being loaded
 * @param ef            experimental factor
 * @param efv           experimental factor value
 * @param counter       up/down/nonde expression experiment counts for this gene-ef-efv
 * @param experiments   experiment ids the user's query is restricted to; empty means no restriction
 * @param showNonDEData if true, also load experiments in which this gene-ef-efv is non-differentially expressed
 * @return Pair of total times (ms) spent on bit index and ncdf queries respectively
 */
private Pair<Long, Long> loadListExperiments(
        AtlasStructuredQueryResult result,
        final AtlasGene gene,
        final String ef,
        final String efv,
        final UpdownCounter counter,
        Set<Long> experiments,
        boolean showNonDEData) {
    long totalBitIndexQueryTime = 0;
    long totalNcdfQueryTime = 0;
    long start = System.currentTimeMillis();
    // Retrieve experiments in which geneId-ef-efv have UP or DOWN expression
    EfvAttribute attr = new EfvAttribute(ef, efv, StatisticsType.UP_DOWN);
    Set<ExperimentInfo> scoringExps =
            atlasStatisticsQueryService.getScoringExperimentsForBioEntityAndAttribute(gene.getGeneId(), attr);
    totalBitIndexQueryTime += System.currentTimeMillis() - start;
    // Design element accession of the first expression analysis seen; displayed in the list row
    String designElementAccession = null;
    List<ListResultRowExperiment> experimentsForRow = new ArrayList<ListResultRowExperiment>();
    // Store minimum up/down pValues across all scoring experiments
    float pup = 1, pdn = 1;
    for (ExperimentInfo exp : scoringExps) {
        if ((!experiments.isEmpty() && !experiments.contains(exp.getExperimentId())) ||
                // We currently allow up to result.getRowsPerGene() list view rows per gene (where each list row corresponds to a single ef-efv)
                result.getNumberOfListResultsForGene(gene) > result.getRowsPerGene())
            continue;
        // Get Experiment to get experiment description, needed in list view
        // TODO: we use both experimentSolrDAO and underlying Solr server in this class.
        // That means we're using two different levels of abstraction in the same class
        // That means we're not structuring our application properly
        Experiment aexp = experimentDAO.getById(exp.getExperimentId());
        if (aexp == null)
            continue;
        List<ExpressionAnalysis> upDnEAs = new ArrayList<ExpressionAnalysis>();
        // NOTE(review): isUp is assigned but never read in this method - candidate for removal
        boolean isUp = true;
        // Note that it is possible for the same geneId-ef-efv to be both up and down in the same experiment (and proxy) - in two
        // different design elements
        if (counter.getUps() > 0) {
            start = System.currentTimeMillis();
            ExpressionAnalysis ea = atlasNetCDFDAO.getBestEAForGeneEfEfvInExperiment(aexp, (long) gene.getGeneId(), ef, efv, UpDownCondition.CONDITION_UP);
            totalNcdfQueryTime += System.currentTimeMillis() - start;
            if (ea != null) {
                upDnEAs.add(ea);
            }
        }
        if (counter.getDowns() > 0) {
            start = System.currentTimeMillis();
            ExpressionAnalysis ea = atlasNetCDFDAO.getBestEAForGeneEfEfvInExperiment(aexp, (long) gene.getGeneId(), ef, efv, UpDownCondition.CONDITION_DOWN);
            totalNcdfQueryTime += System.currentTimeMillis() - start;
            if (ea != null) {
                upDnEAs.add(ea);
            }
        }
        // Assemble experiment rows for the ListResultRow corresponding to geneId-ef-efv
        for (ExpressionAnalysis ea : upDnEAs) {
            if (designElementAccession == null) {
                designElementAccession = ea.getDesignElementAccession();
            }
            if (ea.isUp()) {
                pup = Math.min(pup, ea.getPValAdjusted());
            } else if (ea.isDown()) {
                pdn = Math.min(pdn, ea.getPValAdjusted());
            }
            ListResultRowExperiment experiment = new ListResultRowExperiment(experimentDAO.getById(exp.getExperimentId()),
                    ea.getPValAdjusted(),
                    UpDownExpression.valueOf(ea.getPValAdjusted(), ea.getTStatistic()));
            experimentsForRow.add(experiment);
        }
    }
    if (showNonDEData) {
        // Now retrieve experiments in which geneId-ef-efv have NON_D_E expression
        attr = attr.withStatType(StatisticsType.NON_D_E);
        scoringExps = atlasStatisticsQueryService.getScoringExperimentsForBioEntityAndAttribute(gene.getGeneId(), attr);
        for (ExperimentInfo exp : scoringExps) {
            if ((!experiments.isEmpty() && !experiments.contains(exp.getExperimentId())) ||
                    // We currently allow up to result.getRowsPerGene() list view rows per gene (where each list row corresponds to a single ef-efv)
                    result.getNumberOfListResultsForGene(gene) > result.getRowsPerGene())
                continue;
            // Get Experiment to get experiment description, needed in list view
            // TODO: we use both experimentSolrDAO and underlying Solr server in this class.
            // That means we're using two different levels of abstraction in the same class
            // That means we're not structuring our application properly
            Experiment aexp = experimentDAO.getById(exp.getExperimentId());
            if (aexp == null)
                continue;
            start = System.currentTimeMillis();
            ExpressionAnalysis ea = atlasNetCDFDAO.getBestEAForGeneEfEfvInExperiment(aexp, (long) gene.getGeneId(), ef, efv, UpDownCondition.CONDITION_NONDE);
            totalNcdfQueryTime += System.currentTimeMillis() - start;
            if (ea != null) {
                ListResultRowExperiment experiment = new ListResultRowExperiment(experimentDAO.getById(exp.getExperimentId()),
                        // This is just a placeholder as pValues for nonDE expressions are currently (not available here
                        // and therefore) not displayed in experiment pop-ups off the list view
                        ea.getPValAdjusted(),
                        UpDownExpression.NONDE);
                experimentsForRow.add(experiment);
            }
        }
    }
    // If more than one experiment row was created, sort the list by pValue (in asc order)
    if (experimentsForRow.size() > 1) {
        Collections.sort(experimentsForRow, new Comparator<ListResultRowExperiment>() {
            public int compare(ListResultRowExperiment o1, ListResultRowExperiment o2) {
                return Float.valueOf(o1.getPvalue()).compareTo(o2.getPvalue());
            }
        });
    }
    // If at least one experiment row was created, add to result a ListResultRow corresponding to geneId-ef-efv
    if (experimentsForRow.size() > 0) {
        ListResultRow row = new ListResultRow(ef, efv, counter.getUps(), counter.getDowns(), counter.getNones(), pup, pdn, designElementAccession);
        row.setGene(gene);
        row.setExp_list(experimentsForRow);
        result.addListResult(row);
    }
    // Return timings to be logged later
    return Pair.create(totalBitIndexQueryTime, totalNcdfQueryTime);
}
/**
 * Creates SOLR query from atlas query.
 *
 * @param rowsPerPage maximum number of gene documents to fetch from SOLR
 * @param qstate      query state holding the assembled SOLR query string
 * @return solr query object with return fields, facets and highlighting configured
 */
private SolrQuery setupSolrQuery(Integer rowsPerPage, QueryState qstate) {
    SolrQuery q = new SolrQuery(qstate.getSolrq().toString());
    q.setRows(rowsPerPage);
    q.setFacet(true);
    // Fields returned for each matching gene document
    q.addField("score");
    q.addField("id");
    q.addField("name");
    q.addField("identifier");
    q.addField("species");
    for (String p : genePropService.getIdNameDescProperties())
        q.addField("property_" + p);
    // Previously written as "5 + max" with a local int max that was always 0 - inlined the constant
    q.setFacetLimit(5);
    q.setFacetMinCount(2);
    for (String p : genePropService.getDrilldownProperties()) {
        q.addFacetField("property_f_" + p);
    }
    q.addFacetField("species");
    // Highlighting: mark matched terms in id/name/synonym/identifier and curated gene properties
    q.setHighlight(true);
    q.setHighlightSnippets(100);
    q.setParam("hl.usePhraseHighlighter", "true");
    q.setParam("hl.mergeContiguous", "true");
    q.setHighlightRequireFieldMatch(true);
    q.addHighlightField("id");
    q.addHighlightField("name");
    q.addHighlightField("synonym");
    q.addHighlightField("identifier");
    for (String p : genePropService.getIdNameDescProperties())
        q.addHighlightField("property_" + p);
    log.debug("Expanded query: " + q.toString());
    return q;
}
/**
 * Retrieves gene facets from SOLR response.
 *
 * @param response solr response
 * @param name     facet field name to extract
 * @param values   query values already in the user's query (excluded from the facet)
 * @return iterable collection of at most 5 sorted facet values with counters;
 *         empty if fewer than 2 distinct values remain after filtering
 */
private Iterable<FacetCounter> getGeneFacet(QueryResponse response, final String name, Set<String> values) {
    FacetField field = response.getFacetField(name);
    // Nothing useful to show unless the facet field exists and has at least two distinct values
    if (field == null || field.getValueCount() < 2 || field.getValues() == null)
        return new ArrayList<FacetCounter>();
    List<FacetCounter> counters = new ArrayList<FacetCounter>();
    for (FacetField.Count facetCount : field.getValues()) {
        // Skip facet values the user has already queried for
        if (!values.contains(facetCount.getName())) {
            counters.add(new FacetCounter(facetCount.getName(), (int) facetCount.getCount()));
        }
    }
    if (counters.size() < 2)
        return new ArrayList<FacetCounter>();
    Collections.sort(counters);
    // Show at most the top 5 facet values
    return counters.subList(0, Math.min(counters.size(), 5));
}
/**
 * Returns collection of experimental factors for drop-down, filtered by config,
 * sorted case-insensitively by their curated EF names.
 *
 * @return collection of strings representing experimental factors
 */
public Collection<String> getExperimentalFactorOptions() {
    List<String> factors = new ArrayList<String>();
    factors.addAll(efvService.getOptionsFactors());
    Collections.sort(factors, new Comparator<String>() {
        public int compare(String o1, String o2) {
            // Compare curated EF names on BOTH sides. The previous code compared the curated EF of o1
            // against the curated *gene property* of o2, breaking the comparator's symmetry contract
            // (compare(a,b) must be the negation of compare(b,a)) and producing an inconsistent sort.
            return atlasProperties.getCuratedEf(o1).compareToIgnoreCase(atlasProperties.getCuratedEf(o2));
        }
    });
    return factors;
}
/**
 * Returns list of available gene property options sorted case-insensitively by curated value.
 *
 * @return list of gene property names, including the synthetic "name" property
 */
public List<String> getGenePropertyOptions() {
    final List<String> options = new ArrayList<String>();
    options.addAll(genePropService.getIdNameDescProperties());
    // The gene name pseudo-property is always offered alongside the real properties
    options.add(Constants.GENE_PROPERTY_NAME);
    Collections.sort(options, new Comparator<String>() {
        public int compare(String o1, String o2) {
            // Order by the human-readable curated names rather than the raw property ids
            return atlasProperties.getCuratedGeneProperty(o1).compareToIgnoreCase(atlasProperties.getCuratedGeneProperty(o2));
        }
    });
    return options;
}
/**
 * Returns list of available species, lazily populated from a SOLR facet query on first call
 * and cached in {@code allSpecies} until the index is rebuilt (cache is cleared in onIndexBuildFinish()).
 *
 * @return sorted set of species names
 */
public SortedSet<String> getSpeciesOptions() {
    if (allSpecies.isEmpty()) {
        // Facet on the "species" field across all documents to enumerate every species in the index
        SolrQuery q = new SolrQuery("*:*");
        q.setRows(0); // we only need facet counts, not documents
        q.addFacetField("species");
        q.setFacet(true);
        q.setFacetLimit(-1); // no limit - return all distinct species values
        q.setFacetMinCount(1);
        q.setFacetSort(FacetParams.FACET_SORT_COUNT);
        try {
            QueryResponse qr = solrServerAtlas.query(q);
            // NOTE(review): assumes the first facet field in the response is "species" since it is the
            // only facet field requested - confirm this ordering is guaranteed by the SolrJ client
            if (qr.getFacetFields().get(0).getValues() != null) {
                for (FacetField.Count ffc : qr.getFacetFields().get(0).getValues()) {
                    allSpecies.add(ffc.getName());
                }
            }
        } catch (SolrServerException e) {
            throw createUnexpected("Can't fetch all factors", e);
        }
    }
    // NOTE(review): this lazy initialisation is not synchronized - concurrent first calls may each
    // query SOLR and populate the set; verify callers/field type tolerate that
    return allSpecies;
}
/**
 * Index rebuild notification handler: invalidates the cached species list so it is
 * re-fetched from SOLR on the next getSpeciesOptions() call.
 */
public void onIndexBuildFinish() {
    allSpecies.clear();
}
// No action needed when an index build starts; the cached species list stays valid until the build finishes
public void onIndexBuildStart() {
}
/**
 * Destructor called by Spring: unregisters this instance from index build notifications.
 *
 * @throws Exception declared for the Spring destroy-method contract; not thrown by this implementation
 */
public void destroy() throws Exception {
    if (indexBuilder != null)
        indexBuilder.unregisterIndexBuildEventHandler(this);
}
}
| true | true | private void processResultGenes(QueryResponse response,
AtlasStructuredQueryResult result,
QueryState qstate,
AtlasStructuredQuery query,
Integer numOfResults,
StatisticsQueryCondition statisticsQuery,
boolean usePvalsInHeatmapOrdering
) throws SolrServerException {
// Note that this method processes results from the query assembled from an already sorted list of
// gene id () got from an earlier atlasStatisticsQueryService.getSortedGenes() call). However, by default Solr
// returns its results in the order specified by its notion of relevancy, and I don't know of any way of
// forcing Solr to return results in the order of explicit field values in the query.
// In effect, the earlier sorting itself is lost, though it still has a crucial function of identifying
// the required chunk from the overall sorted list of genes for the user's query.
// To re-instate the sorting we enter the newly constructed StructuredResultRows into the TreeSet below (ordered
// according to compareTo() method in StructuredResultRow)
SortedSet<StructuredResultRow> structuredResultRows = new TreeSet<StructuredResultRow>();
// The list below stores list of StructuredResultRow's until all the (efo and efv) counters have been added
// to each row. Only after that is this list sorted by the cumulative experiment counts in each row.
List<StructuredResultRow> unsortedHeatmapRows = new ArrayList<StructuredResultRow>();
// Initialise scores cache to store efo counts for the group of genes of interest to this query.
// For each heat map row other than the first, the cache will be hit instead of AtlasStatisticsQueryService
Map<StatisticsType, HashMap<String, Multiset<Integer>>> scoresCache = getScoresCache();
SolrDocumentList docs = response.getResults();
EfvTree<ColumnInfo> resultEfvs = new EfvTree<ColumnInfo>();
EfoTree<ColumnInfo> resultEfos = qstate.getEfos();
Iterable<EfvTree.EfEfv<ColumnInfo>> efvList = qstate.getEfvs().getValueSortedList();
Iterable<EfoTree.EfoItem<ColumnInfo>> efoList = qstate.getEfos().getValueOrderedList();
boolean hasQueryEfoEfvs = qstate.hasQueryEfoEfvs();
// heatmap column numberer
Maker<ColumnInfo> numberer = new Maker<ColumnInfo>() {
private int num = 0;
public ColumnInfo make() {
return new BaseColumnInfo(num++);
}
};
Collection<String> autoFactors;
if ((!query.getConditions().isEmpty() && !query.getConditions().iterator().next().isAnything())
|| query.isFullHeatmap()) {
autoFactors = efvService.getAllFactors();
} else {
// If the user hasn't specified any conditions or query.isFullHeatmap() is false (the default for heatmap),
// choose only 'usual factors of interest' - as shown in GXA DAS source
autoFactors = atlasProperties.getDasFactors();
}
// timing collection variables
long overallBitStatsProcessingTime = 0;
long overallNcdfAccessTimeForListView = 0;
long overallBitStatsProcessingTimeForListView = 0;
// Retrieve from docs the gene restriction list to be used in subsequent StatisticsStorage queries.
Set<Integer> bioEntityIdRestrictionSet = getBioEntityIdRestrictionSet(docs);
if (!hasQueryEfoEfvs) {
long timeStart = System.currentTimeMillis();
populateScoringAttributes(bioEntityIdRestrictionSet, autoFactors, qstate, statisticsQuery.getStatisticsType(), query.isFullHeatmap());
long diff = System.currentTimeMillis() - timeStart;
overallBitStatsProcessingTime += diff;
List<EfvTree.EfEfv<ColumnInfo>> scoringEfvs = qstate.getEfvs().getValueSortedList();
log.info("User query contained no efv/efo conditions; collected " + scoringEfvs.size() + " scoring efv conditions in " + diff + " ms");
efvList = scoringEfvs;
}
// This map stores HeatMapColumn object for each EfvTree.EfEfv processed in the loop below.
// Note that HeatMapColumn contains a list of counters that is a vertical slice through heatmap, specific
// to that EfvTree.EfEfv. HeatMapColumn objects wll be used to sort efv heatmap columns by their cumulative
// experiment counts before adding appropriate counters to the horizontal slices (per gene) displayed in heatmap,
// i.e. StructuredResultRow's in unsortedHeatmapRows.
Map<EfvTree.EfEfv<ColumnInfo>, HeatMapColumn> efvToColumn = new HashMap<EfvTree.EfEfv<ColumnInfo>, HeatMapColumn>();
log.debug("Processing " + numOfResults + " result bioentities...");
result.setTotal(numOfResults);
int added = 0;
for (SolrDocument doc : docs) {
long hmRowStart = System.currentTimeMillis();
long overallBitStatsProcessingTimeForHeatMapRow = 0;
Object idObj = doc.getFieldValue("id");
if (idObj == null) {
continue;
}
Integer bioEntityId = (Integer) idObj;
AtlasGene gene = new AtlasGene(doc);
if (response.getHighlighting() != null)
gene.setGeneHighlights(response.getHighlighting().get(bioEntityId.toString()));
// counters will contain a list of heatmap cell payloads
List<UpdownCounter> efoCounters = new ArrayList<UpdownCounter>() {
@Override
public UpdownCounter get(int index) {
if (index < size())
return super.get(index);
else
return new UpdownCounter(0, 0, 0, 0, 0);
}
};
// This variable will determine if a StructuredResultRow will be displayed;
// it is set to true of at least one cell has counts greater than minExperiments - used in StructuredResultRow constructor.
boolean rowQualifies = false;
// Now calculate up/dn/nonde counts for all efvs/efos for the current gene
Iterator<EfvTree.EfEfv<ColumnInfo>> itEfv = efvList.iterator();
Iterator<EfoTree.EfoItem<ColumnInfo>> itEfo = efoList.iterator();
// attrToCounter is used to construct list view and stores mapping between attributes derived from processed efo terms and
// their corresponding statistics counters
Map<EfvAttribute, UpdownCounter> attrToCounter = new HashMap<EfvAttribute, UpdownCounter>();
EfvTree.EfEfv<ColumnInfo> efEfv = null;
EfoTree.EfoItem<ColumnInfo> efoItem = null;
while (itEfv.hasNext() || itEfo.hasNext() || efEfv != null || efoItem != null) {
if (itEfv.hasNext() && efEfv == null) {
efEfv = itEfv.next();
}
if (itEfo.hasNext() && efoItem == null) {
efoItem = itEfo.next();
}
UpdownCounter counter;
boolean usingEfv = efoItem == null || (efEfv != null && efEfv.getPayload().compareTo(efoItem.getPayload()) < 0);
if (usingEfv) {
String ef = efEfv.getEf();
String efv = efEfv.getEfv();
EfvAttribute attr = new EfvAttribute(ef, efv, null);
if (!attrToCounter.containsKey(attr)) {
// 1. In the list view: the above test prevents querying bit index for the same attribute more then once,
// e.g. when an efo is also processed (and thus broken down into efvs it maps to - c.f. below)
// for the current gene that maps to that attribute
// 2. In the heatmap view, the use of attrToCounter is not essential, but it innocuous
long timeStart = System.currentTimeMillis();
counter = getStats(scoresCache, attr, bioEntityId, bioEntityIdRestrictionSet, ((QueryColumnInfo) efEfv.getPayload()).displayNonDECounts(), usePvalsInHeatmapOrdering);
long diff = System.currentTimeMillis() - timeStart;
overallBitStatsProcessingTime += diff;
overallBitStatsProcessingTimeForHeatMapRow += diff;
attrToCounter.put(attr, counter);
}
if (!efvToColumn.containsKey(efEfv)) {
efvToColumn.put(efEfv, new HeatMapColumn(efEfv));
}
efvToColumn.get(efEfv).addRowCounter(attrToCounter.get(attr));
if (efEfv.getPayload().isQualified(attrToCounter.get(attr))) {
rowQualifies = true;
efvToColumn.get(efEfv).setQualifies(true);
}
efEfv = null;
} else {
String efoTerm = efoItem.getId();
if (query.getViewType() == ViewType.LIST) { // efo's in list view
Set<EfvAttribute> attrsForEfo = atlasStatisticsQueryService.getAttributesForEfo(efoTerm);
long timeStart = System.currentTimeMillis();
for (EfvAttribute attr : attrsForEfo) {
if (!attrToCounter.containsKey(attr)) {
// the above test prevents querying bit index for the same attribute more then once - if more
// than one efo processed here maps to that attribute (e.g. an efo's term and its parent)
counter = getStats(scoresCache, attr, bioEntityId, bioEntityIdRestrictionSet, ((QueryColumnInfo) efoItem.getPayload()).displayNonDECounts(), usePvalsInHeatmapOrdering);
if (efoItem.getPayload().isQualified(counter)) {
rowQualifies = true;
attrToCounter.put(attr, counter);
}
}
}
long diff = System.currentTimeMillis() - timeStart;
overallBitStatsProcessingTime += diff;
overallBitStatsProcessingTimeForListView += diff;
} else { // efo's in heatmap
// Get statistics for efoTerm-gene
long timeStart = System.currentTimeMillis();
// third param is not important below in getStats() - as we get counts for all stat types anyway
Attribute attr = new EfoAttribute(efoTerm, null);
counter = getStats(scoresCache, attr, bioEntityId, bioEntityIdRestrictionSet, ((QueryColumnInfo) efoItem.getPayload()).displayNonDECounts(), usePvalsInHeatmapOrdering);
long diff = System.currentTimeMillis() - timeStart;
overallBitStatsProcessingTime += diff;
overallBitStatsProcessingTimeForHeatMapRow += diff;
if (!resultEfos.getPayload(efoTerm).isPositionSet()) {
// If the final heatmap column position has not yet been set (e.g. while processing
// a previous gene in the main loop), set it now
resultEfos.setPayload(efoTerm, numberer.make());
}
// Accumulate efo counters
efoCounters.add(counter);
if (efoItem.getPayload().isQualified(counter)) {
rowQualifies = true;
// Mark efo for displaying in heatmap it the experiment counts in this cell qualify it
resultEfos.mark(efoItem.getId(), !INCLUDE_EFO_PARENTS_IN_HEATMAP);
// Tag efoItem as non-expandable in heatmap header - if applicable
resultEfos.setNonExpandableIfApplicable(efoItem.getId(), qstate.getScoringEfos());
} else {
log.debug("Rejecting " + efoItem.getId() + " for bioentity " + bioEntityId + " as score does not satisfy min experiments condition");
}
}
efoItem = null;
}
}
// Store a Structured row (with just efo counters in it for now) in unsortedHeatmapRows. Efv counters will be added
// below once the efv columns have been sorted by their cumulative experiment counts.
/** TODO
* In advanced (though never simple) i/f queries, heatmap columns will correspond to ef-efv with different expressions in the user's query.
* For example, in the case of 'UP in heart and DOWN in lung' query, the required expression for a group of heart-related efv columns
* will be UP, and the required expression for another group of lung-related columns will be DOWN.
* In such cases, ef-efv-specific StatisticsType(s) should be passed to StructuredResultRow constructor. Then the sorting of unsortedHeatmapRows
* in the final heatmap would truly reflect the user's query. As things stand now, a simplification has been made that only
* the first clause's stat type (stored in statisticsQuery.getStatisticsType()) is passed to StructuredResultRow constructor. Consequently,
* unsortedHeatmapRows are currently sorted by the aggregate counts corresponding to statistics type of the first AND clause only.
*/
unsortedHeatmapRows.add(new StructuredResultRow(gene, efoCounters, rowQualifies, statisticsQuery.getStatisticsType()));
// Now process for list view all attributes in attrToCounter (mapped to by efo's processed above)
if (query.getViewType() == ViewType.LIST) {
for (Map.Entry<EfvAttribute, UpdownCounter> entry : attrToCounter.entrySet()) {
final EfvAttribute attribute = entry.getKey();
boolean displayNonDECounts = entry.getValue().getNones() > 0;
Pair<Long, Long> queryTimes = loadListExperiments(result, gene, attribute.getEf(), attribute.getEfv(), entry.getValue(), qstate.getExperiments(), displayNonDECounts);
overallBitStatsProcessingTime += queryTimes.getFirst();
overallBitStatsProcessingTimeForListView += queryTimes.getFirst();
overallNcdfAccessTimeForListView += queryTimes.getSecond();
}
}
log.debug("Processed gene: " + gene.getGeneName() + " in: " + (System.currentTimeMillis() - hmRowStart) + "; bit stats time: " + overallBitStatsProcessingTimeForHeatMapRow);
}
// So far we accumulated rows of counters for all efos into unsortedHeatmapRows
// We have also accumulated Efv columns data in efvToColumn
// What we need to do now is the following:
// 1. Eliminate columns that don't qualify to be displayed in heatmap
// 2. Sort HeatMapColumns in efvToColumn.values() according to the cumulative column counter and
// 3. Transfer efv counters to rows in unsortedHeatmapRows and transfer efvs to resultEfvs, now with the correct sorted
// column positions as payloads
// Remove non-qualifying columns
Collection<HeatMapColumn> qualifyingColumns = Collections2.filter(efvToColumn.values(),
new Predicate<HeatMapColumn>() {
public boolean apply(HeatMapColumn col) {
return col.qualifies();
}
});
List<HeatMapColumn> efvColumns = new ArrayList<HeatMapColumn>(qualifyingColumns);
// Sort efv columns by the their cumulative experiment counts in each column
Collections.sort(efvColumns);
// Transfer efv counters to rows in unsortedHeatmapRows and transfer efvs to resultEfvs, now with the correct sorted
// column positions as payloads.
for (HeatMapColumn hmColumn : efvColumns) {
EfvTree.EfEfv<ColumnInfo> efEfv = hmColumn.getEfEfv();
efEfv.setPayload(numberer.make()); // Set position for each efv column after the above sort
int row = 0;
for (StructuredResultRow structuredRow : unsortedHeatmapRows) {
UpdownCounter counter = hmColumn.getRowCounter(row++);
structuredRow.addCounter(counter);
if (efEfv.getPayload().isQualified(counter)) {
resultEfvs.put(efEfv);
}
}
}
// Now that unsortedHeatmapRows has both efo and efv data, where:
// efo columns are sorted by the order in which they were entered into QueryState
// efv columns are sorted by their cumulative column experiment counts (efv's with highest experiment counts moving to the left)
// - we can transfer unsortedHeatmapRows to structuredResultRows. Because the latter is a TreeSet, heatmap rows
// will now be sorted according to their aggregated row counts, with genes with highest experiment counts moving
// to the top of the heatmap
structuredResultRows.addAll(unsortedHeatmapRows);
log.debug("structuredResultRows.size() = " + structuredResultRows.size() + "; added = " + added);
// Returned results sorted by geneScore, eliminating rows that had zero qualifying score (i.e. all the scores added for
// all efvs where the counts were >= min experiments)
for (StructuredResultRow row : structuredResultRows) {
if (row.qualifies()) {
result.addResult(row);
} else {
log.info("Excluding from heatmap row for gene: " + row.getGene().getGeneName());
}
}
log.info("Overall bitstats processing time: " + overallBitStatsProcessingTime + " ms");
if (query.getViewType() == ViewType.LIST) {
log.info("Overall listview-related bitstats processing time: " + overallBitStatsProcessingTimeForListView + " ms");
log.info("Overall listview-related ncdf querying time: " + overallNcdfAccessTimeForListView + " ms");
}
result.setResultEfvs(resultEfvs);
result.setResultEfos(resultEfos);
log.info("Retrieved query completely: " + result.getSize() + " records of " +
result.getTotal() + " total starting from " + result.getStart());
log.debug("Resulting EFVs are: " + resultEfvs.getNameSortedList().size());
log.debug("Resulting EFOs are: " + resultEfos.getMarkedSubTreeList().size());
}
| private void processResultGenes(QueryResponse response,
AtlasStructuredQueryResult result,
QueryState qstate,
AtlasStructuredQuery query,
Integer numOfResults,
StatisticsQueryCondition statisticsQuery,
boolean usePvalsInHeatmapOrdering
) throws SolrServerException {
// Note that this method processes results from the query assembled from an already sorted list of
// gene id () got from an earlier atlasStatisticsQueryService.getSortedGenes() call). However, by default Solr
// returns its results in the order specified by its notion of relevancy, and I don't know of any way of
// forcing Solr to return results in the order of explicit field values in the query.
// In effect, the earlier sorting itself is lost, though it still has a crucial function of identifying
// the required chunk from the overall sorted list of genes for the user's query.
// To re-instate the sorting we enter the newly constructed StructuredResultRows into the TreeSet below (ordered
// according to compareTo() method in StructuredResultRow)
SortedSet<StructuredResultRow> structuredResultRows = new TreeSet<StructuredResultRow>();
// The list below stores list of StructuredResultRow's until all the (efo and efv) counters have been added
// to each row. Only after that is this list sorted by the cumulative experiment counts in each row.
List<StructuredResultRow> unsortedHeatmapRows = new ArrayList<StructuredResultRow>();
// Initialise scores cache to store efo counts for the group of genes of interest to this query.
// For each heat map row other than the first, the cache will be hit instead of AtlasStatisticsQueryService
Map<StatisticsType, HashMap<String, Multiset<Integer>>> scoresCache = getScoresCache();
SolrDocumentList docs = response.getResults();
EfvTree<ColumnInfo> resultEfvs = new EfvTree<ColumnInfo>();
EfoTree<ColumnInfo> resultEfos = qstate.getEfos();
Iterable<EfvTree.EfEfv<ColumnInfo>> efvList = qstate.getEfvs().getValueSortedList();
Iterable<EfoTree.EfoItem<ColumnInfo>> efoList = qstate.getEfos().getValueOrderedList();
boolean hasQueryEfoEfvs = qstate.hasQueryEfoEfvs();
// heatmap column numberer
Maker<ColumnInfo> numberer = new Maker<ColumnInfo>() {
private int num = 0;
public ColumnInfo make() {
return new BaseColumnInfo(num++);
}
};
Collection<String> autoFactors;
if ((!query.getConditions().isEmpty() && !query.getConditions().iterator().next().isAnything())
|| query.isFullHeatmap()) {
autoFactors = efvService.getAllFactors();
} else {
// If the user hasn't specified any conditions or query.isFullHeatmap() is false (the default for heatmap),
// choose only 'usual factors of interest' - as shown in GXA DAS source
autoFactors = atlasProperties.getDasFactors();
}
// timing collection variables
long overallBitStatsProcessingTime = 0;
long overallNcdfAccessTimeForListView = 0;
long overallBitStatsProcessingTimeForListView = 0;
// Retrieve from docs the gene restriction list to be used in subsequent StatisticsStorage queries.
Set<Integer> bioEntityIdRestrictionSet = getBioEntityIdRestrictionSet(docs);
if (!hasQueryEfoEfvs) {
long timeStart = System.currentTimeMillis();
populateScoringAttributes(bioEntityIdRestrictionSet, autoFactors, qstate, statisticsQuery.getStatisticsType(), query.isFullHeatmap());
long diff = System.currentTimeMillis() - timeStart;
overallBitStatsProcessingTime += diff;
List<EfvTree.EfEfv<ColumnInfo>> scoringEfvs = qstate.getEfvs().getValueSortedList();
log.info("User query contained no efv/efo conditions; collected " + scoringEfvs.size() + " scoring efv conditions in " + diff + " ms");
efvList = scoringEfvs;
}
// This map stores HeatMapColumn object for each EfvTree.EfEfv processed in the loop below.
// Note that HeatMapColumn contains a list of counters that is a vertical slice through heatmap, specific
// to that EfvTree.EfEfv. HeatMapColumn objects wll be used to sort efv heatmap columns by their cumulative
// experiment counts before adding appropriate counters to the horizontal slices (per gene) displayed in heatmap,
// i.e. StructuredResultRow's in unsortedHeatmapRows.
Map<EfvTree.EfEfv<ColumnInfo>, HeatMapColumn> efvToColumn = new HashMap<EfvTree.EfEfv<ColumnInfo>, HeatMapColumn>();
log.debug("Processing " + numOfResults + " result bioentities...");
result.setTotal(numOfResults);
int added = 0;
for (SolrDocument doc : docs) {
long hmRowStart = System.currentTimeMillis();
long overallBitStatsProcessingTimeForHeatMapRow = 0;
Object idObj = doc.getFieldValue("id");
if (idObj == null) {
continue;
}
Integer bioEntityId = (Integer) idObj;
AtlasGene gene = new AtlasGene(doc);
if (response.getHighlighting() != null)
gene.setGeneHighlights(response.getHighlighting().get(bioEntityId.toString()));
// counters will contain a list of heatmap cell payloads
List<UpdownCounter> efoCounters = new ArrayList<UpdownCounter>() {
@Override
public UpdownCounter get(int index) {
if (index < size())
return super.get(index);
else
return new UpdownCounter(0, 0, 0, 0, 0);
}
};
// This variable will determine if a StructuredResultRow will be displayed;
// it is set to true of at least one cell has counts greater than minExperiments - used in StructuredResultRow constructor.
boolean rowQualifies = false;
// Now calculate up/dn/nonde counts for all efvs/efos for the current gene
Iterator<EfvTree.EfEfv<ColumnInfo>> itEfv = efvList.iterator();
Iterator<EfoTree.EfoItem<ColumnInfo>> itEfo = efoList.iterator();
// attrToCounter is used to construct list view and stores mapping between attributes derived from processed efo terms and
// their corresponding statistics counters
Map<EfvAttribute, UpdownCounter> attrToCounter = new HashMap<EfvAttribute, UpdownCounter>();
EfvTree.EfEfv<ColumnInfo> efEfv = null;
EfoTree.EfoItem<ColumnInfo> efoItem = null;
while (itEfv.hasNext() || itEfo.hasNext() || efEfv != null || efoItem != null) {
if (itEfv.hasNext() && efEfv == null) {
efEfv = itEfv.next();
}
if (itEfo.hasNext() && efoItem == null) {
efoItem = itEfo.next();
}
UpdownCounter counter;
boolean usingEfv = efoItem == null || (efEfv != null && efEfv.getPayload().compareTo(efoItem.getPayload()) < 0);
if (usingEfv) {
String ef = efEfv.getEf();
String efv = efEfv.getEfv();
EfvAttribute attr = new EfvAttribute(ef, efv, null);
if (!attrToCounter.containsKey(attr)) {
// 1. In the list view: the above test prevents querying bit index for the same attribute more then once,
// e.g. when an efo is also processed (and thus broken down into efvs it maps to - c.f. below)
// for the current gene that maps to that attribute
// 2. In the heatmap view, the use of attrToCounter is not essential, but it innocuous
long timeStart = System.currentTimeMillis();
counter = getStats(scoresCache, attr, bioEntityId, bioEntityIdRestrictionSet, ((QueryColumnInfo) efEfv.getPayload()).displayNonDECounts(), usePvalsInHeatmapOrdering);
long diff = System.currentTimeMillis() - timeStart;
overallBitStatsProcessingTime += diff;
overallBitStatsProcessingTimeForHeatMapRow += diff;
attrToCounter.put(attr, counter);
}
if (!efvToColumn.containsKey(efEfv)) {
efvToColumn.put(efEfv, new HeatMapColumn(efEfv));
}
efvToColumn.get(efEfv).addRowCounter(attrToCounter.get(attr));
if (efEfv.getPayload().isQualified(attrToCounter.get(attr))) {
rowQualifies = true;
efvToColumn.get(efEfv).setQualifies(true);
} else { // Counter for attr doesn't qualify
if (query.getViewType() == ViewType.LIST) {
// In list view we display gene-ef-efv rows, hence if a given ef-efv counter doesn't qualify
// remove it from the rows to be displayed for the current gene
attrToCounter.remove(attr);
}
}
efEfv = null;
} else {
String efoTerm = efoItem.getId();
if (query.getViewType() == ViewType.LIST) { // efo's in list view
Set<EfvAttribute> attrsForEfo = atlasStatisticsQueryService.getAttributesForEfo(efoTerm);
long timeStart = System.currentTimeMillis();
for (EfvAttribute attr : attrsForEfo) {
if (!attrToCounter.containsKey(attr)) {
// the above test prevents querying bit index for the same attribute more then once - if more
// than one efo processed here maps to that attribute (e.g. an efo's term and its parent)
counter = getStats(scoresCache, attr, bioEntityId, bioEntityIdRestrictionSet, ((QueryColumnInfo) efoItem.getPayload()).displayNonDECounts(), usePvalsInHeatmapOrdering);
if (efoItem.getPayload().isQualified(counter)) {
rowQualifies = true;
attrToCounter.put(attr, counter);
}
}
}
long diff = System.currentTimeMillis() - timeStart;
overallBitStatsProcessingTime += diff;
overallBitStatsProcessingTimeForListView += diff;
} else { // efo's in heatmap
// Get statistics for efoTerm-gene
long timeStart = System.currentTimeMillis();
// third param is not important below in getStats() - as we get counts for all stat types anyway
Attribute attr = new EfoAttribute(efoTerm, null);
counter = getStats(scoresCache, attr, bioEntityId, bioEntityIdRestrictionSet, ((QueryColumnInfo) efoItem.getPayload()).displayNonDECounts(), usePvalsInHeatmapOrdering);
long diff = System.currentTimeMillis() - timeStart;
overallBitStatsProcessingTime += diff;
overallBitStatsProcessingTimeForHeatMapRow += diff;
if (!resultEfos.getPayload(efoTerm).isPositionSet()) {
// If the final heatmap column position has not yet been set (e.g. while processing
// a previous gene in the main loop), set it now
resultEfos.setPayload(efoTerm, numberer.make());
}
// Accumulate efo counters
efoCounters.add(counter);
if (efoItem.getPayload().isQualified(counter)) {
rowQualifies = true;
// Mark efo for displaying in heatmap it the experiment counts in this cell qualify it
resultEfos.mark(efoItem.getId(), !INCLUDE_EFO_PARENTS_IN_HEATMAP);
// Tag efoItem as non-expandable in heatmap header - if applicable
resultEfos.setNonExpandableIfApplicable(efoItem.getId(), qstate.getScoringEfos());
} else {
log.debug("Rejecting " + efoItem.getId() + " for bioentity " + bioEntityId + " as score does not satisfy min experiments condition");
}
}
efoItem = null;
}
}
// Store a Structured row (with just efo counters in it for now) in unsortedHeatmapRows. Efv counters will be added
// below once the efv columns have been sorted by their cumulative experiment counts.
/** TODO
* In advanced (though never simple) i/f queries, heatmap columns will correspond to ef-efv with different expressions in the user's query.
* For example, in the case of 'UP in heart and DOWN in lung' query, the required expression for a group of heart-related efv columns
* will be UP, and the required expression for another group of lung-related columns will be DOWN.
* In such cases, ef-efv-specific StatisticsType(s) should be passed to StructuredResultRow constructor. Then the sorting of unsortedHeatmapRows
* in the final heatmap would truly reflect the user's query. As things stand now, a simplification has been made that only
* the first clause's stat type (stored in statisticsQuery.getStatisticsType()) is passed to StructuredResultRow constructor. Consequently,
* unsortedHeatmapRows are currently sorted by the aggregate counts corresponding to statistics type of the first AND clause only.
*/
unsortedHeatmapRows.add(new StructuredResultRow(gene, efoCounters, rowQualifies, statisticsQuery.getStatisticsType()));
// Now process for list view all attributes in attrToCounter (mapped to by efo's processed above)
if (query.getViewType() == ViewType.LIST) {
for (Map.Entry<EfvAttribute, UpdownCounter> entry : attrToCounter.entrySet()) {
final EfvAttribute attribute = entry.getKey();
boolean displayNonDECounts = entry.getValue().getNones() > 0;
Pair<Long, Long> queryTimes = loadListExperiments(result, gene, attribute.getEf(), attribute.getEfv(), entry.getValue(), qstate.getExperiments(), displayNonDECounts);
overallBitStatsProcessingTime += queryTimes.getFirst();
overallBitStatsProcessingTimeForListView += queryTimes.getFirst();
overallNcdfAccessTimeForListView += queryTimes.getSecond();
}
}
log.debug("Processed gene: " + gene.getGeneName() + " in: " + (System.currentTimeMillis() - hmRowStart) + "; bit stats time: " + overallBitStatsProcessingTimeForHeatMapRow);
}
// So far we accumulated rows of counters for all efos into unsortedHeatmapRows
// We have also accumulated Efv columns date in efvToColumn
// What we need to do now is the following:
// 1. Eliminate columns that don't qualify to be displayed in heatmap
// 2. Sort HeatMapColumns in efvToColumn.values() according to the cumulative column counter and
// 3. Transfer efv counters to rows in unsortedHeatmapRows and transfer efvs to resultEfvs, now with the correct sorted
// column positions as payloads
// Remove non-qualifying columns
Collection<HeatMapColumn> qualifyingColumns = Collections2.filter(efvToColumn.values(),
new Predicate<HeatMapColumn>() {
public boolean apply(HeatMapColumn col) {
return col.qualifies();
}
});
List<HeatMapColumn> efvColumns = new ArrayList<HeatMapColumn>(qualifyingColumns);
// Sort efv columns by the their cumulative experiment counts in each column
Collections.sort(efvColumns);
// Transfer efv counters to rows in unsortedHeatmapRows and transfer efvs to resultEfvs, now with the correct sorted
// column positions as payloads.
for (HeatMapColumn hmColumn : efvColumns) {
EfvTree.EfEfv<ColumnInfo> efEfv = hmColumn.getEfEfv();
efEfv.setPayload(numberer.make()); // Set position for each efv column after the above sort
int row = 0;
for (StructuredResultRow structuredRow : unsortedHeatmapRows) {
UpdownCounter counter = hmColumn.getRowCounter(row++);
structuredRow.addCounter(counter);
if (efEfv.getPayload().isQualified(counter)) {
resultEfvs.put(efEfv);
}
}
}
// Now that unsortedHeatmapRows has both efo and efv data, where:
// efo columns are sorted by the order in which they were entered into QueryState
// efv columns are sorted by their cumulative column experiment counts (efv's with highest experiment counts moving to the left)
// - we can transfer unsortedHeatmapRows to structuredResultRows. Because the latter is a TreeSet, heatmap rows
// will now be sorted according to their aggregated row counts, with genes with highest experiment counts moving
// to the top of the heatmap
structuredResultRows.addAll(unsortedHeatmapRows);
log.debug("structuredResultRows.size() = " + structuredResultRows.size() + "; added = " + added);
// Returned results sorted by geneScore, eliminating that had zero qualifying score (i.e. all the scores added for
// all efvs where the counts were >= min experiments)
for (StructuredResultRow row : structuredResultRows) {
if (row.qualifies()) {
result.addResult(row);
} else {
log.info("Excluding from heatmap row for gene: " + row.getGene().getGeneName());
}
}
log.info("Overall bitstats processing time: " + overallBitStatsProcessingTime + " ms");
if (query.getViewType() == ViewType.LIST) {
log.info("Overall listview-related bitstats processing time: " + overallBitStatsProcessingTimeForListView + " ms");
log.info("Overall listview-related ncdf querying time: " + overallNcdfAccessTimeForListView + " ms");
}
result.setResultEfvs(resultEfvs);
result.setResultEfos(resultEfos);
log.info("Retrieved query completely: " + result.getSize() + " records of " +
result.getTotal() + " total starting from " + result.getStart());
log.debug("Resulting EFVs are: " + resultEfvs.getNameSortedList().size());
log.debug("Resulting EFOs are: " + resultEfos.getMarkedSubTreeList().size());
}
|
diff --git a/src/main/java/hudson/plugins/dimensionsscm/DimensionsSCM.java b/src/main/java/hudson/plugins/dimensionsscm/DimensionsSCM.java
index d8d118c..4eaf814 100644
--- a/src/main/java/hudson/plugins/dimensionsscm/DimensionsSCM.java
+++ b/src/main/java/hudson/plugins/dimensionsscm/DimensionsSCM.java
@@ -1,1278 +1,1278 @@
/* ===========================================================================
* Copyright (c) 2007 Serena Software. All rights reserved.
*
* Use of the Sample Code provided by Serena is governed by the following
* terms and conditions. By using the Sample Code, you agree to be bound by
* the terms contained herein. If you do not agree to the terms herein, do
* not install, copy, or use the Sample Code.
*
* 1. GRANT OF LICENSE. Subject to the terms and conditions herein, you
* shall have the nonexclusive, nontransferable right to use the Sample Code
* for the sole purpose of developing applications for use solely with the
* Serena software product(s) that you have licensed separately from Serena.
* Such applications shall be for your internal use only. You further agree
* that you will not: (a) sell, market, or distribute any copies of the
* Sample Code or any derivatives or components thereof; (b) use the Sample
* Code or any derivatives thereof for any commercial purpose; or (c) assign
* or transfer rights to the Sample Code or any derivatives thereof.
*
* 2. DISCLAIMER OF WARRANTIES. TO THE MAXIMUM EXTENT PERMITTED BY
* APPLICABLE LAW, SERENA PROVIDES THE SAMPLE CODE AS IS AND WITH ALL
* FAULTS, AND HEREBY DISCLAIMS ALL WARRANTIES AND CONDITIONS, EITHER
* EXPRESSED, IMPLIED OR STATUTORY, INCLUDING, BUT NOT LIMITED TO, ANY
* IMPLIED WARRANTIES OR CONDITIONS OF MERCHANTABILITY, OF FITNESS FOR A
* PARTICULAR PURPOSE, OF LACK OF VIRUSES, OF RESULTS, AND OF LACK OF
* NEGLIGENCE OR LACK OF WORKMANLIKE EFFORT, CONDITION OF TITLE, QUIET
* ENJOYMENT, OR NON-INFRINGEMENT. THE ENTIRE RISK AS TO THE QUALITY OF
* OR ARISING OUT OF USE OR PERFORMANCE OF THE SAMPLE CODE, IF ANY,
* REMAINS WITH YOU.
*
* 3. EXCLUSION OF DAMAGES. TO THE MAXIMUM EXTENT PERMITTED BY APPLICABLE
* LAW, YOU AGREE THAT IN CONSIDERATION FOR RECEIVING THE SAMPLE CODE AT NO
* CHARGE TO YOU, SERENA SHALL NOT BE LIABLE FOR ANY DAMAGES WHATSOEVER,
* INCLUDING BUT NOT LIMITED TO DIRECT, SPECIAL, INCIDENTAL, INDIRECT, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, DAMAGES FOR LOSS OF
* PROFITS OR CONFIDENTIAL OR OTHER INFORMATION, FOR BUSINESS INTERRUPTION,
* FOR PERSONAL INJURY, FOR LOSS OF PRIVACY, FOR NEGLIGENCE, AND FOR ANY
* OTHER LOSS WHATSOEVER) ARISING OUT OF OR IN ANY WAY RELATED TO THE USE
* OF OR INABILITY TO USE THE SAMPLE CODE, EVEN IN THE EVENT OF THE FAULT,
* TORT (INCLUDING NEGLIGENCE), STRICT LIABILITY, OR BREACH OF CONTRACT,
* EVEN IF SERENA HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES. THE
* FOREGOING LIMITATIONS, EXCLUSIONS AND DISCLAIMERS SHALL APPLY TO THE
* MAXIMUM EXTENT PERMITTED BY APPLICABLE LAW. NOTWITHSTANDING THE ABOVE,
* IN NO EVENT SHALL SERENA'S LIABILITY UNDER THIS AGREEMENT OR WITH RESPECT
* TO YOUR USE OF THE SAMPLE CODE AND DERIVATIVES THEREOF EXCEED US$10.00.
*
* 4. INDEMNIFICATION. You hereby agree to defend, indemnify and hold
* harmless Serena from and against any and all liability, loss or claim
* arising from this agreement or from (i) your license of, use of or
* reliance upon the Sample Code or any related documentation or materials,
* or (ii) your development, use or reliance upon any application or
* derivative work created from the Sample Code.
*
* 5. TERMINATION OF THE LICENSE. This agreement and the underlying
* license granted hereby shall terminate if and when your license to the
* applicable Serena software product terminates or if you breach any terms
* and conditions of this agreement.
*
* 6. CONFIDENTIALITY. The Sample Code and all information relating to the
* Sample Code (collectively "Confidential Information") are the
* confidential information of Serena. You agree to maintain the
* Confidential Information in strict confidence for Serena. You agree not
* to disclose or duplicate, nor allow to be disclosed or duplicated, any
* Confidential Information, in whole or in part, except as permitted in
* this Agreement. You shall take all reasonable steps necessary to ensure
* that the Confidential Information is not made available or disclosed by
* you or by your employees to any other person, firm, or corporation. You
* agree that all authorized persons having access to the Confidential
* Information shall observe and perform under this nondisclosure covenant.
* You agree to immediately notify Serena of any unauthorized access to or
* possession of the Confidential Information.
*
* 7. AFFILIATES. Serena as used herein shall refer to Serena Software,
* Inc. and its affiliates. An entity shall be considered to be an
* affiliate of Serena if it is an entity that controls, is controlled by,
* or is under common control with Serena.
*
* 8. GENERAL. Title and full ownership rights to the Sample Code,
* including any derivative works shall remain with Serena. If a court of
* competent jurisdiction holds any provision of this agreement illegal or
* otherwise unenforceable, that provision shall be severed and the
* remainder of the agreement shall remain in full force and effect.
* ===========================================================================
*/
/*
* This experimental plugin extends Hudson support for Dimensions SCM repositories
*
* @author Tim Payne
*
*/
// Package name
package hudson.plugins.dimensionsscm;
// Dimensions imports
import hudson.plugins.dimensionsscm.DimensionsAPI;
import hudson.plugins.dimensionsscm.DimensionsSCMRepositoryBrowser;
import hudson.plugins.dimensionsscm.Logger;
import hudson.plugins.dimensionsscm.DimensionsChangeLogParser;
import hudson.plugins.dimensionsscm.DimensionsBuildWrapper;
import hudson.plugins.dimensionsscm.DimensionsBuildNotifier;
import hudson.plugins.dimensionsscm.DimensionsChecker;
import hudson.plugins.dimensionsscm.CheckOutAPITask;
import hudson.plugins.dimensionsscm.CheckOutCmdTask;
import hudson.plugins.dimensionsscm.GetHostDetailsTask;
// Hudson imports
import hudson.Extension;
import hudson.FilePath;
import hudson.Launcher;
import hudson.Util;
import hudson.model.Hudson;
import hudson.model.AbstractBuild;
import hudson.model.AbstractProject;
import hudson.model.BuildListener;
import hudson.model.ModelObject;
import hudson.model.Run;
import hudson.model.TaskListener;
import hudson.scm.ChangeLogParser;
import hudson.scm.RepositoryBrowsers;
import hudson.scm.SCM;
import hudson.scm.SCMDescriptor;
import hudson.util.FormFieldValidator;
import hudson.util.Scrambler;
import hudson.util.VariableResolver;
import hudson.FilePath;
import hudson.FilePath.FileCallable;
import hudson.model.Node;
import hudson.model.Computer;
import hudson.model.Hudson.MasterComputer;
import hudson.remoting.Callable;
import hudson.remoting.DelegatingCallable;
import hudson.remoting.Channel;
import hudson.remoting.VirtualChannel;
// General imports
import java.io.File;
import java.io.FileOutputStream;
import java.io.FileWriter;
import java.io.IOException;
import java.io.PrintWriter;
import java.io.Serializable;
import java.net.URLDecoder;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Calendar;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.TimeZone;
import java.util.Vector;
import java.net.InetSocketAddress;
import java.net.InetAddress;
import java.net.UnknownHostException;
import javax.servlet.ServletException;
import net.sf.json.JSONObject;
import org.kohsuke.stapler.DataBoundConstructor;
import org.kohsuke.stapler.QueryParameter;
import org.kohsuke.stapler.StaplerRequest;
import org.kohsuke.stapler.StaplerResponse;
import org.apache.commons.lang.StringUtils;
/*
* Hudson requires the following functions to be implemented
*
* public boolean checkout(AbstractBuild build, Launcher launcher, FilePath workspace, BuildListener listener, File changelogFile)
* throws IOException, InterruptedException;
* public boolean pollChanges(AbstractProject project, Launcher launcher, FilePath workspace, TaskListener listener)
* throws IOException, InterruptedException;
* public ChangeLogParser createChangeLogParser();
* public SCMDescriptor<?> getDescriptor();
*
* For this experimental plugin, only the main ones will be implemented
*
*/
/*
* Main Dimensions SCM class which creates the plugin logic
*/
public class DimensionsSCM extends SCM implements Serializable
{
// Hudson details
private String project;
private String directory;
private String workarea;
private String jobUserName;
private String jobPasswd;
private String jobServer;
private String jobDatabase;
private String[] folders = new String[0];
private String jobTimeZone;
private String jobWebUrl;
private boolean canJobUpdate;
private boolean canJobDelete;
private boolean canJobForce;
private boolean canJobRevert;
DimensionsAPI dmSCM;
DimensionsSCMRepositoryBrowser browser;
public DimensionsSCM getSCM() {
return this;
}
public DimensionsAPI getAPI() {
return this.dmSCM;
}
/*
* Gets the project ID for the connection.
* @return the project ID
*/
public String getProject() {
return this.project;
}
/*
* Gets the project path.
* @return the project path
*/
public String getDirectory() {
return this.directory;
}
/*
* Gets the project paths.
* @return the project paths
*/
public String[] getFolders() {
return this.folders;
}
/*
* Gets the workarea path.
* @return the workarea path
*/
public String getWorkarea() {
return this.workarea;
}
/*
* Gets the job user ID for the connection.
* @return the job user ID
*/
public String getJobUserName() {
return this.jobUserName;
}
/*
* Gets the job passwd for the connection.
* @return the project ID
*/
public String getJobPasswd() {
return Scrambler.descramble(jobPasswd);
}
/*
* Gets the server ID for the connection.
* @return the server ID
*/
public String getJobServer() {
return this.jobServer;
}
/*
* Gets the job database ID for the connection.
* @return the job database ID
*/
public String getJobDatabase() {
return this.jobDatabase;
}
/*
* Gets the job timezone for the connection.
* @return the job timezone
*/
public String getJobTimeZone() {
return this.jobTimeZone;
}
/*
* Gets the job weburl ID for the connection.
* @return the job weburl
*/
public String getJobWebUrl() {
return this.jobWebUrl;
}
/*
* Gets the update .
* @return the update
*/
public boolean isCanJobUpdate() {
return this.canJobUpdate;
}
/*
* Gets the delete .
* @return the delete
*/
public boolean isCanJobDelete() {
return this.canJobDelete;
}
/*
* Gets the force .
* @return the force
*/
public boolean isCanJobForce() {
return this.canJobForce;
}
/*
* Gets the revert .
* @return the force
*/
public boolean isCanJobRevert() {
return this.canJobRevert;
}
@Extension
public static final DescriptorImpl DM_DESCRIPTOR = new DescriptorImpl();
/*
*-----------------------------------------------------------------
* FUNCTION SPECIFICATION
* Name:
* requiresWorkspaceForPolling
* Description:
* Does this SCM plugin require a workspace for polling?
* Parameters:
* Return:
* @return boolean
*-----------------------------------------------------------------
*/
@Override
public boolean requiresWorkspaceForPolling() {
return false;
}
/*
*-----------------------------------------------------------------
* FUNCTION SPECIFICATION
* Name:
* supportsPolling
* Description:
* Does this SCM plugin support polling?
* Parameters:
* Return:
* @return boolean
*-----------------------------------------------------------------
*/
@Override
public boolean supportsPolling() {
return true;
}
/*
*-----------------------------------------------------------------
* FUNCTION SPECIFICATION
* Name:
* buildEnvVars
* Description:
* Build up environment variables for build support
* Parameters:
* Return:
*-----------------------------------------------------------------
*/
@Override
public void buildEnvVars(AbstractBuild build, Map<String, String> env)
{
// To be implemented when build support put in
super.buildEnvVars(build, env);
return;
}
/**
 * Convenience constructor accepting a single {@code directory} instead of a
 * folder array. Delegates to the data-bound constructor with a {@code null}
 * folder list and the directory passed as the trailing argument.
 *
 * @param project      Dimensions project spec
 * @param directory    single monitored directory (may be {@code null})
 * @param workarea     work area path (may be {@code null})
 * @param canJobDelete delete flag
 * @param canJobForce  force flag
 * @param canJobRevert revert flag
 * @param jobUserName  Dimensions user name
 * @param jobPasswd    Dimensions password (plain; scrambled by the delegate)
 * @param jobServer    Dimensions server
 * @param jobDatabase  Dimensions database (as "NAME@CONNECTION")
 * @param canJobUpdate update flag
 * @param jobTimeZone  time zone identifier
 * @param jobWebUrl    web client URL for change-log links
 */
public DimensionsSCM(String project,
                     String directory,
                     String workarea,
                     boolean canJobDelete,
                     boolean canJobForce,
                     boolean canJobRevert,
                     String jobUserName,
                     String jobPasswd,
                     String jobServer,
                     String jobDatabase,
                     boolean canJobUpdate,
                     String jobTimeZone,
                     String jobWebUrl)
{
    this(project,null,workarea,canJobDelete,
         canJobForce,canJobRevert,
         jobUserName,jobPasswd,
         jobServer,jobDatabase,
         canJobUpdate,jobTimeZone,
         jobWebUrl,directory);
}
/**
 * Default (data-bound) constructor for the plugin, invoked from the job
 * configuration form.
 *
 * @param project      Dimensions project spec; defaults to "${JOB_NAME}" when blank
 * @param folders      monitored folders; blank entries are dropped, and an
 *                     empty/missing list defaults to "/"
 * @param workarea     work area path (nullable)
 * @param canJobDelete delete flag
 * @param canJobForce  force flag
 * @param canJobRevert revert flag
 * @param jobUserName  Dimensions user; falls back to the global descriptor value
 * @param jobPasswd    Dimensions password; stored scrambled
 * @param jobServer    Dimensions server; falls back to the global descriptor value
 * @param jobDatabase  Dimensions database; falls back to the global descriptor value
 * @param canJobUpdate update flag
 * @param jobTimeZone  time zone; falls back to the global descriptor value
 * @param jobWebUrl    web URL; falls back to the global descriptor value
 * @param directory    single-directory fallback used when {@code folders} is null
 */
@DataBoundConstructor
public DimensionsSCM(String project,
                     String[] folders,
                     String workarea,
                     boolean canJobDelete,
                     boolean canJobForce,
                     boolean canJobRevert,
                     String jobUserName,
                     String jobPasswd,
                     String jobServer,
                     String jobDatabase,
                     boolean canJobUpdate,
                     String jobTimeZone,
                     String jobWebUrl,
                     String directory)
{
    // Check the folders specified have data specified
    if (folders != null) {
        Logger.Debug("Folders are populated");
        Vector<String> x = new Vector<String>();
        for(int t=0;t<folders.length;t++) {
            if (StringUtils.isNotEmpty(folders[t]))
                x.add(folders[t]);
        }
        // NOTE(review): toArray(new String[1]) is load-bearing — when the
        // vector is empty it yields a one-element array containing null,
        // which the default-to-"/" logic below relies on. Do not "fix" it
        // to new String[0] without also changing that logic.
        this.folders = (String[])x.toArray(new String[1]);
    }
    else {
        // NOTE(review): assumes this.folders is already a non-null,
        // non-empty array here (field initializer not visible in this
        // chunk) — TODO confirm, otherwise this throws.
        if (directory != null)
            this.folders[0] = directory;
    }
    // If nothing specified, then default to '/'
    if (this.folders.length < 2) {
        if (this.folders[0] == null || this.folders[0].length() < 1)
            this.folders[0] = "/";
    }
    // Copying arguments to fields; blank values fall back to the global
    // descriptor defaults where one exists.
    this.project = (Util.fixEmptyAndTrim(project) == null ? "${JOB_NAME}" : project);
    this.workarea = (Util.fixEmptyAndTrim(workarea) == null ? null : workarea);
    this.directory = (Util.fixEmptyAndTrim(directory) == null ? null : directory);
    this.jobServer = (Util.fixEmptyAndTrim(jobServer) == null ? getDescriptor().getServer() : jobServer);
    this.jobUserName = (Util.fixEmptyAndTrim(jobUserName) == null ? getDescriptor().getUserName() : jobUserName);
    this.jobDatabase = (Util.fixEmptyAndTrim(jobDatabase) == null ? getDescriptor().getDatabase() : jobDatabase);
    String passwd = (Util.fixEmptyAndTrim(jobPasswd) == null ? getDescriptor().getPasswd() : jobPasswd);
    // Password is never stored in the clear.
    this.jobPasswd = Scrambler.scramble(passwd);
    this.canJobUpdate = canJobUpdate;
    this.canJobDelete = canJobDelete;
    this.canJobForce = canJobForce;
    this.canJobRevert = canJobRevert;
    this.jobTimeZone = (Util.fixEmptyAndTrim(jobTimeZone) == null ? getDescriptor().getTimeZone() : jobTimeZone);
    this.jobWebUrl = (Util.fixEmptyAndTrim(jobWebUrl) == null ? getDescriptor().getWebUrl() : jobWebUrl);
    String dmS = this.jobServer + "-" + this.jobUserName + ":" + this.jobDatabase;
    Logger.Debug("Starting job for project '" + this.project + "' ('" + this.folders.length + "')" +
                 ", connecting to " + dmS);
}
/**
 * Checkout method for the plugin. Validates the plugin combination, then
 * either runs the checkout through the Java API (on the master) or via the
 * command-line task (on a slave), and finally generates the change set.
 *
 * @param build         the build being run
 * @param launcher      the launcher (unused here)
 * @param workspace     the workspace to check out into
 * @param listener      receives progress and error output
 * @param changelogFile file the change log is written to
 * @return {@code true} on success
 * @throws IOException          on I/O failure
 * @throws InterruptedException if interrupted
 */
@Override
public boolean checkout(final AbstractBuild build, final Launcher launcher,
                        final FilePath workspace, final BuildListener listener,
                        final File changelogFile)
        throws IOException, InterruptedException
{
    boolean bRet = false;
    // NOTE(review): this only logs; the real skip is the isCanJobUpdate()
    // branch below, which sets bRet=true without checking anything out.
    if (!isCanJobUpdate()) {
        Logger.Debug("Skipping checkout - " + this.getClass().getName());
    }
    Logger.Debug("Invoking checkout - " + this.getClass().getName());
    try {
        // Load other Dimensions plugins if set.
        // NOTE(review): bwplugin/bnplugin are never read afterwards —
        // presumably fetched for their load side effect; confirm.
        DimensionsBuildWrapper.DescriptorImpl bwplugin = (DimensionsBuildWrapper.DescriptorImpl)
                Hudson.getInstance().getDescriptor(DimensionsBuildWrapper.class);
        DimensionsBuildNotifier.DescriptorImpl bnplugin = (DimensionsBuildNotifier.DescriptorImpl)
                Hudson.getInstance().getDescriptor(DimensionsBuildNotifier.class);
        if (DimensionsChecker.isValidPluginCombination(build)) {
            Logger.Debug("Plugins are ok");
        } else {
            listener.fatalError("\n[DIMENSIONS] The plugin combinations you have selected are not valid.");
            listener.fatalError("\n[DIMENSIONS] Please review online help to determine valid plugin uses.");
            return false;
        }
        if (isCanJobUpdate()) {
            // Get the details of the master
            InetAddress netAddr = InetAddress.getLocalHost();
            byte[] ipAddr = netAddr.getAddress();
            String hostname = netAddr.getHostName();
            boolean master = false;
            GetHostDetailsTask buildHost = new GetHostDetailsTask(hostname);
            master = workspace.act(buildHost);
            if (master) {
                // Running on master...
                listener.getLogger().println("[DIMENSIONS] Running checkout on master...");
                listener.getLogger().flush();
                // Using Java API because this allows the plugin to work on platforms
                // where Dimensions has not been ported, e.g. MAC OS, which is what
                // I use
                CheckOutAPITask task = new CheckOutAPITask(build,this,workspace,listener);
                bRet = workspace.act(task);
            } else {
                // Running on slave... Have to use the command line as Java API will not
                // work on remote hosts. Cannot serialise it...
                // Probe the server version first (API login on the master),
                // defaulting to 2009 when it cannot be determined.
                int version = 2009;
                long key = dmSCM.login(getJobUserName(),getJobPasswd(),
                                       getJobDatabase(),getJobServer());
                if (key>0) {
                    // Get the server version
                    Logger.Debug("Login worked.");
                    version = dmSCM.getDmVersion();
                    if (version == 0) {
                        version = 2009;
                    }
                    dmSCM.logout(key);
                }
                {
                    // VariableResolver does not appear to be serialisable either, so...
                    VariableResolver<String> myResolver = build.getBuildVariableResolver();
                    String baseline = myResolver.resolve("DM_BASELINE");
                    String requests = myResolver.resolve("DM_REQUEST");
                    listener.getLogger().println("[DIMENSIONS] Running checkout on slave...");
                    listener.getLogger().flush();
                    CheckOutCmdTask task = new CheckOutCmdTask(getJobUserName(), getJobPasswd(),
                                                               getJobDatabase(), getJobServer(),
                                                               getProject(), baseline, requests,
                                                               isCanJobDelete(),
                                                               isCanJobRevert(), isCanJobForce(),
                                                               (build.getPreviousBuild() == null),
                                                               getFolders(),version,
                                                               workspace,listener);
                    bRet = workspace.act(task);
                }
            }
        } else {
            // Update disabled: skip the checkout but still build the change set.
            bRet = true;
        }
        if (bRet) {
            bRet = generateChangeSet(build,listener,changelogFile);
        }
    }
    catch(Exception e)
    {
        String errMsg = e.getMessage();
        if (errMsg == null) {
            errMsg = "An unknown error occurred. Please try the operation again.";
        }
        listener.fatalError("Unable to run checkout callout - " + errMsg);
        // e.printStackTrace();
        //throw new IOException("Unable to run checkout callout - " + e.getMessage());
        bRet = false;
    }
    return bRet;
}
/*
*-----------------------------------------------------------------
* FUNCTION SPECIFICATION
* Name:
* generateChangeSet
* Description:
* Generate the changeset
* Parameters:
* @param AbstractProject build
* @param BuildListener listener
* @param File changelogFile
* Return:
* @return boolean
*-----------------------------------------------------------------
*/
private boolean generateChangeSet(final AbstractBuild build, final BuildListener listener,
final File changelogFile)
throws IOException, InterruptedException {
long key = -1;
boolean bRet = false;
DimensionsAPI dmSCM = new DimensionsAPI();
try
{
// When are we building files for?
// Looking for the last successful build and then going forward from there - could use the last build as well
//
// Calendar lastBuildCal = (build.getPreviousBuild() != null) ? build.getPreviousBuild().getTimestamp() : null;
Calendar lastBuildCal = (build.getPreviousNotFailedBuild() != null) ? build.getPreviousNotFailedBuild().getTimestamp() : null;
Calendar nowDateCal = Calendar.getInstance();
TimeZone tz = (getJobTimeZone() != null && getJobTimeZone().length() > 0) ? TimeZone.getTimeZone(getJobTimeZone()) : TimeZone.getDefault();
if (getJobTimeZone() != null && getJobTimeZone().length() > 0)
Logger.Debug("Job timezone setting is " + getJobTimeZone());
Logger.Debug("Log updates between " + ((lastBuildCal != null) ? DateUtils.getStrDate(lastBuildCal,tz) : "0") +
" -> " + DateUtils.getStrDate(nowDateCal,tz) + " (" + tz.getID() + ")");
dmSCM.setLogger(listener.getLogger());
// Connect to Dimensions...
key = dmSCM.login(getJobUserName(),getJobPasswd(),
getJobDatabase(),getJobServer());
if (key>0)
{
Logger.Debug("Login worked.");
VariableResolver<String> myResolver = build.getBuildVariableResolver();
String baseline = myResolver.resolve("DM_BASELINE");
String requests = myResolver.resolve("DM_REQUEST");
if (baseline != null) {
baseline = baseline.trim();
baseline = baseline.toUpperCase();
}
if (requests != null) {
requests = requests.replaceAll(" ","");
requests = requests.toUpperCase();
}
Logger.Debug("Extra parameters - " + baseline + " " + requests);
String[] folders = getFolders();
if (baseline != null && baseline.length() == 0)
baseline = null;
if (requests != null && requests.length() == 0)
requests = null;
bRet = true;
// Iterate through the project folders and process them in Dimensions
for (int ii=0;ii<folders.length; ii++) {
if (!bRet)
break;
String folderN = folders[ii];
File fileName = new File(folderN);
FilePath dname = new FilePath(fileName);
Logger.Debug("Looking for changes in '" + folderN + "'...");
// Checkout the folder
bRet = dmSCM.createChangeSetLogs(key,getProject(),dname,
lastBuildCal,nowDateCal,
changelogFile, tz,
jobWebUrl,
baseline,requests);
}
// Close the changes log file
{
FileWriter logFile = null;
try {
logFile = new FileWriter(changelogFile,true);
PrintWriter fmtWriter = new PrintWriter(logFile);
fmtWriter.println("</changelog>");
logFile.flush();
bRet=true;
} catch (Exception e) {
throw new IOException("Unable to write change log - " + e.getMessage());
} finally {
logFile.close();
}
}
}
}
catch(Exception e)
{
String errMsg = e.getMessage();
if (errMsg == null) {
errMsg = "An unknown error occurred. Please try the operation again.";
}
- listener.fatalError("Unable to run checkout callout - " + errMsg);
+ listener.fatalError("Unable to run change set callout - " + errMsg);
// e.printStackTrace();
- //throw new IOException("Unable to run checkout callout - " + e.getMessage());
+ //throw new IOException("Unable to run change set callout - " + e.getMessage());
bRet = false;
}
finally
{
dmSCM.logout(key);
}
return bRet;
}
/**
 * Has the repository had any changes? Polls the Dimensions server directly
 * for updates to any configured folder since the last (successful) build.
 *
 * @param project   the project being polled
 * @param launcher  the launcher (unused here)
 * @param workspace the workspace (unused; polling needs no workspace)
 * @param listener  receives progress and error output
 * @return {@code true} if the repository has been updated
 * @throws IOException          on I/O failure
 * @throws InterruptedException if interrupted
 */
@Override
public boolean pollChanges(final AbstractProject project, final Launcher launcher,
                           final FilePath workspace, final TaskListener listener)
        throws IOException, InterruptedException
{
    boolean bChanged = false;
    Logger.Debug("Invoking pollChanges - " + this.getClass().getName() );
    Logger.Debug("Checking job - " + project.getName());
    long key = -1;
    // No project configured: nothing to poll.
    if (getProject() == null || getProject().length() == 0)
        return false;
    // Never built: force a first build.
    if (project.getLastBuild() == null)
        return true;
    try
    {
        // Poll from the last successful build, falling back to the last build.
        Calendar lastBuildCal = null;
        if (project.getLastSuccessfulBuild() != null && project.getLastSuccessfulBuild().getTimestamp() != null)
            lastBuildCal = project.getLastSuccessfulBuild().getTimestamp();
        else
            lastBuildCal = project.getLastBuild().getTimestamp();
        Calendar nowDateCal = Calendar.getInstance();
        TimeZone tz = (getJobTimeZone() != null && getJobTimeZone().length() > 0) ? TimeZone.getTimeZone(getJobTimeZone()) : TimeZone.getDefault();
        if (getJobTimeZone() != null && getJobTimeZone().length() > 0)
            Logger.Debug("Job timezone setting is " + getJobTimeZone());
        Logger.Debug("Checking for any updates between " + ((lastBuildCal != null) ? DateUtils.getStrDate(lastBuildCal,tz) : "0") +
                     " -> " + DateUtils.getStrDate(nowDateCal,tz) + " (" + tz.getID() + ")");
        // Lazily create the shared API handle.
        if (dmSCM == null)
        {
            Logger.Debug("Creating new API interface object");
            dmSCM = new DimensionsAPI();
        }
        dmSCM.setLogger(listener.getLogger());
        // Connect to Dimensions...
        key = dmSCM.login(jobUserName,
                          getJobPasswd(),
                          jobDatabase,
                          jobServer);
        if (key>0)
        {
            String[] folders = getFolders();
            // Iterate through the project folders; stop at the first change.
            for (int ii=0;ii<folders.length; ii++) {
                if (bChanged)
                    break;
                String folderN = folders[ii];
                File fileName = new File(folderN);
                FilePath dname = new FilePath(fileName);
                Logger.Debug("Polling using key "+key);
                Logger.Debug("Polling '" + folderN + "'...");
                bChanged = dmSCM.hasRepositoryBeenUpdated(key,getProject(),
                                                          dname,lastBuildCal,
                                                          nowDateCal, tz);
            }
        }
    }
    catch(Exception e)
    {
        String errMsg = e.getMessage();
        if (errMsg == null) {
            errMsg = "An unknown error occurred. Please try the operation again.";
        }
        listener.fatalError("Unable to run pollChanges callout - " + errMsg);
        // e.printStackTrace();
        //throw new IOException("Unable to run pollChanges callout - " + e.getMessage());
        bChanged = false;
    }
    finally
    {
        dmSCM.logout(key);
    }
    return bChanged;
}
/**
 * Creates the parser used to read this SCM's change log files.
 *
 * @return a new {@link DimensionsChangeLogParser}
 */
@Override
public ChangeLogParser createChangeLogParser()
{
    final String className = this.getClass().getName();
    Logger.Debug("Invoking createChangeLogParser - " + className);
    return new DimensionsChangeLogParser();
}
/**
 * Return an SCM descriptor.
 *
 * @return the shared {@link DescriptorImpl} singleton ({@code DM_DESCRIPTOR})
 */
@Override
public DescriptorImpl getDescriptor()
{
    return DM_DESCRIPTOR;
}
/*
 * Implementation class for Dimensions plugin: the global (system-level)
 * descriptor. Holds the system-wide connection defaults that jobs fall
 * back to, builds DimensionsSCM instances from submitted forms, and
 * provides the form-validation callbacks.
 */
public static class
DescriptorImpl extends SCMDescriptor<DimensionsSCM> implements ModelObject {
    // API handle used only by the connection-test callback (docheckServer).
    DimensionsAPI connectionCheck = null;
    // Global defaults used when a job leaves the matching field blank.
    private String server;
    private String userName;
    private String passwd;   // stored scrambled; see getPasswd()/setPasswd()
    private String database;
    private String timeZone;
    private String webUrl;
    private boolean canUpdate;
    /*
     * Loads the SCM descriptor and any previously saved global settings.
     */
    public DescriptorImpl()
    {
        super(DimensionsSCM.class, DimensionsSCMRepositoryBrowser.class);
        load();
        Logger.Debug("Loading " + this.getClass().getName());
    }
    // Name shown in the configuration UI.
    public String getDisplayName()
    {
        return "Dimensions";
    }
    /*
     * Save the SCM descriptor configuration from the global settings form.
     */
    @Override
    public boolean configure(StaplerRequest req, JSONObject jobj) throws FormException
    {
        // Get the values and check them
        userName = req.getParameter("dimensionsscm.userName");
        passwd = req.getParameter("dimensionsscm.passwd");
        server = req.getParameter("dimensionsscm.server");
        database = req.getParameter("dimensionsscm.database");
        timeZone = req.getParameter("dimensionsscm.timeZone");
        webUrl = req.getParameter("dimensionsscm.webUrl");
        // Re-read and trim each value that was present (null stays null).
        if (userName != null)
            userName = Util.fixNull(req.getParameter("dimensionsscm.userName").trim());
        if (passwd != null)
            passwd = Util.fixNull(req.getParameter("dimensionsscm.passwd").trim());
        if (server != null)
            server = Util.fixNull(req.getParameter("dimensionsscm.server").trim());
        if (database != null)
            database = Util.fixNull(req.getParameter("dimensionsscm.database").trim());
        if (timeZone != null)
            timeZone = Util.fixNull(req.getParameter("dimensionsscm.timeZone").trim());
        if (webUrl != null)
            webUrl = Util.fixNull(req.getParameter("dimensionsscm.webUrl").trim());
        req.bindJSON(DM_DESCRIPTOR, jobj);
        this.save();
        return super.configure(req, jobj);
    }
    // Builds a DimensionsSCM instance from the submitted job form.
    @Override
    public SCM newInstance(StaplerRequest req, JSONObject formData) throws FormException {
        // Get variables and then construct a new object
        String[] folders = req.getParameterValues("dimensionsscm.folders");
        String project = req.getParameter("dimensionsscm.project");
        String directory = req.getParameter("dimensionsscm.directory");
        String workarea = req.getParameter("dimensionsscm.workarea");
        // Checkbox parameters arrive as "on" when ticked.
        Boolean canJobDelete = Boolean.valueOf("on".equalsIgnoreCase(req.getParameter("dimensionsscm.canJobDelete")));
        Boolean canJobForce = Boolean.valueOf("on".equalsIgnoreCase(req.getParameter("dimensionsscm.canJobForce")));
        Boolean canJobRevert = Boolean.valueOf("on".equalsIgnoreCase(req.getParameter("dimensionsscm.canJobRevert")));
        Boolean canJobUpdate = Boolean.valueOf("on".equalsIgnoreCase(req.getParameter("dimensionsscm.canJobUpdate")));
        String jobUserName = req.getParameter("dimensionsscm.jobUserName");
        String jobPasswd = req.getParameter("dimensionsscm.jobPasswd");
        String jobServer = req.getParameter("dimensionsscm.jobServer");
        String jobDatabase = req.getParameter("dimensionsscm.jobDatabase");
        String jobTimeZone = req.getParameter("dimensionsscm.jobTimeZone");
        String jobWebUrl = req.getParameter("dimensionsscm.jobWebUrl");
        DimensionsSCM scm = new DimensionsSCM(project,folders,workarea,canJobDelete,
                                              canJobForce,canJobRevert,
                                              jobUserName,jobPasswd,
                                              jobServer,jobDatabase,
                                              canJobUpdate,jobTimeZone,
                                              jobWebUrl,directory);
        scm.browser = RepositoryBrowsers.createInstance(DimensionsSCMRepositoryBrowser.class,req,formData,"browser");
        if (scm.dmSCM == null)
            scm.dmSCM = new DimensionsAPI();
        return scm;
    }
    /*
     * Gets the default timezone for the connection.
     * @return the timezone
     */
    public String getTimeZone() {
        return this.timeZone;
    }
    /*
     * Gets the default weburl for the connection.
     * @return the weburl
     */
    public String getWebUrl() {
        return this.webUrl;
    }
    /*
     * Gets the default user ID for the connection.
     * @return the user ID of the user as whom to connect
     */
    public String getUserName() {
        return this.userName;
    }
    /*
     * Gets the default base database for the connection (as "NAME@CONNECTION").
     * @return the name of the base database to connect to
     */
    public String getDatabase() {
        return this.database;
    }
    /*
     * Gets the default server for the connection.
     * @return the name of the server to connect to
     */
    public String getServer() {
        return this.server;
    }
    /*
     * Gets the default password, descrambled for use.
     * @return the password
     */
    public String getPasswd() {
        return Scrambler.descramble(passwd);
    }
    /*
     * Gets the update flag.
     * @return the update flag
     */
    public boolean isCanUpdate() {
        return this.canUpdate;
    }
    /*
     * Sets the update flag.
     */
    public void setCanUpdate(boolean x) {
        this.canUpdate = x;
    }
    /*
     * Sets the default user ID for the connection.
     */
    public void setUserName(String userName) {
        this.userName = userName;
    }
    /*
     * Sets the default base database for the connection (as "NAME@CONNECTION").
     */
    public void setDatabase(String database) {
        this.database = database;
    }
    /*
     * Sets the default server for the connection.
     */
    public void setServer(String server) {
        this.server = server;
    }
    /*
     * Sets the default password; stored scrambled.
     */
    public void setPasswd(String password) {
        this.passwd = Scrambler.scramble(password);
    }
    /*
     * Sets the default timezone for the connection.
     */
    public void setTimeZone(String x) {
        this.timeZone = x;
    }
    /*
     * Sets the default weburl for the connection.
     */
    public void setWebUrl(String x) {
        this.webUrl = x;
    }
    // Generic optional-field validator: any value (including none) is OK.
    private void doCheck(StaplerRequest req, StaplerResponse rsp)
            throws IOException, ServletException
    {
        new FormFieldValidator(req, rsp, false)
        {
            @Override
            protected void check() throws IOException, ServletException
            {
                String value = Util.fixEmpty(request.getParameter("value"));
                String nullText = null;
                if (value == null)
                {
                    // nullText is always null here, so an empty value is OK.
                    if (nullText == null)
                        ok();
                    else
                        error(nullText);
                    return;
                }
                else
                {
                    ok();
                    return;
                }
            }
        }.process();
    }
    // Validator for mandatory fields: rejects an empty value.
    public void domanadatoryFieldCheck(StaplerRequest req, StaplerResponse rsp)
            throws IOException, ServletException
    {
        new FormFieldValidator(req, rsp, false)
        {
            @Override
            protected void check() throws IOException, ServletException
            {
                String value = Util.fixEmpty(request.getParameter("value"));
                String errorTxt = "This value is manadatory.";
                if (value == null)
                {
                    error(errorTxt);
                    return;
                }
                else
                {
                    // Some processing
                    ok();
                    return;
                }
            }
        }.process();
    }
    // Job-level mandatory-field validator; currently always accepts.
    public void domanadatoryJobFieldCheck(StaplerRequest req, StaplerResponse rsp)
            throws IOException, ServletException
    {
        new FormFieldValidator(req, rsp, false)
        {
            @Override
            protected void check() throws IOException, ServletException
            {
                String value = Util.fixEmpty(request.getParameter("value"));
                String errorTxt = "This value is manadatory.";
                // Some processing in the future
                ok();
                return;
            }
        }.process();
    }
    /*
     * Check if the specified timezone is valid. The job-level value, when
     * present, takes precedence over the system-level one.
     */
    public void docheckTz(StaplerRequest req, StaplerResponse rsp,
                          @QueryParameter("dimensionsscm.timeZone") final String timezone,
                          @QueryParameter("dimensionsscm.jobTimeZone") final String jobtimezone)
            throws IOException, ServletException
    {
        new FormFieldValidator(req, rsp, false)
        {
            @Override
            protected void check() throws IOException, ServletException
            {
                try
                {
                    String xtz = (jobtimezone != null) ? jobtimezone : timezone;
                    Logger.Debug("Invoking docheckTz - " + xtz);
                    TimeZone ctz = TimeZone.getTimeZone(xtz);
                    String lmt = ctz.getID();
                    // TimeZone.getTimeZone() silently falls back to GMT for
                    // unknown IDs, so a GMT result for a non-GMT input means
                    // the value was not recognised.
                    if (lmt.equalsIgnoreCase("GMT") && !(xtz.equalsIgnoreCase("GMT") ||
                                                         xtz.equalsIgnoreCase("Greenwich Mean Time") ||
                                                         xtz.equalsIgnoreCase("UTC") ||
                                                         xtz.equalsIgnoreCase("Coordinated Universal Time")))
                        error("Timezone specified is not valid.");
                    else
                        ok("Timezone test succeeded!");
                    return;
                }
                catch (Exception e)
                {
                    error("timezone check error:" + e.getMessage());
                }
            }
        }.process();
    }
    /*
     * Check if the specified Dimensions server is valid by performing a
     * test login. Job-level values, when present, take precedence over the
     * system-level ones.
     */
    public void docheckServer(StaplerRequest req, StaplerResponse rsp,
                              @QueryParameter("dimensionsscm.userName") final String user,
                              @QueryParameter("dimensionsscm.passwd") final String passwd,
                              @QueryParameter("dimensionsscm.server") final String server,
                              @QueryParameter("dimensionsscm.database") final String database,
                              @QueryParameter("dimensionsscm.jobUserName") final String jobuser,
                              @QueryParameter("dimensionsscm.jobPasswd") final String jobPasswd,
                              @QueryParameter("dimensionsscm.jobServer") final String jobServer,
                              @QueryParameter("dimensionsscm.jobDatabase") final String jobDatabase)
            throws IOException, ServletException
    {
        new FormFieldValidator(req, rsp, false)
        {
            @Override
            protected void check() throws IOException, ServletException
            {
                if (connectionCheck == null)
                    connectionCheck = new DimensionsAPI();
                try
                {
                    String xserver = (jobServer != null) ? jobServer : server;
                    String xuser = (jobuser != null) ? jobuser : user;
                    String xpasswd = (jobPasswd != null) ? jobPasswd : passwd;
                    String xdatabase = (jobDatabase != null) ? jobDatabase : database;
                    long key = -1;
                    String dmS = xserver + "-" + xuser + ":" + xdatabase;
                    Logger.Debug("Invoking serverCheck - " + dmS);
                    key = connectionCheck.login(xuser,
                                                xpasswd,
                                                xdatabase,
                                                xserver);
                    if (key<1)
                    {
                        error("Connection test failed");
                    }
                    else
                    {
                        ok("Connection test succeeded!");
                        connectionCheck.logout(key);
                    }
                    return;
                }
                catch (Exception e)
                {
                    error("Server connection error:" + e.getMessage());
                }
            }
        }.process();
    }
}
}
| false | true | private boolean generateChangeSet(final AbstractBuild build, final BuildListener listener,
final File changelogFile)
throws IOException, InterruptedException {
long key = -1;
boolean bRet = false;
DimensionsAPI dmSCM = new DimensionsAPI();
try
{
// When are we building files for?
// Looking for the last successful build and then going forward from there - could use the last build as well
//
// Calendar lastBuildCal = (build.getPreviousBuild() != null) ? build.getPreviousBuild().getTimestamp() : null;
Calendar lastBuildCal = (build.getPreviousNotFailedBuild() != null) ? build.getPreviousNotFailedBuild().getTimestamp() : null;
Calendar nowDateCal = Calendar.getInstance();
TimeZone tz = (getJobTimeZone() != null && getJobTimeZone().length() > 0) ? TimeZone.getTimeZone(getJobTimeZone()) : TimeZone.getDefault();
if (getJobTimeZone() != null && getJobTimeZone().length() > 0)
Logger.Debug("Job timezone setting is " + getJobTimeZone());
Logger.Debug("Log updates between " + ((lastBuildCal != null) ? DateUtils.getStrDate(lastBuildCal,tz) : "0") +
" -> " + DateUtils.getStrDate(nowDateCal,tz) + " (" + tz.getID() + ")");
dmSCM.setLogger(listener.getLogger());
// Connect to Dimensions...
key = dmSCM.login(getJobUserName(),getJobPasswd(),
getJobDatabase(),getJobServer());
if (key>0)
{
Logger.Debug("Login worked.");
VariableResolver<String> myResolver = build.getBuildVariableResolver();
String baseline = myResolver.resolve("DM_BASELINE");
String requests = myResolver.resolve("DM_REQUEST");
if (baseline != null) {
baseline = baseline.trim();
baseline = baseline.toUpperCase();
}
if (requests != null) {
requests = requests.replaceAll(" ","");
requests = requests.toUpperCase();
}
Logger.Debug("Extra parameters - " + baseline + " " + requests);
String[] folders = getFolders();
if (baseline != null && baseline.length() == 0)
baseline = null;
if (requests != null && requests.length() == 0)
requests = null;
bRet = true;
// Iterate through the project folders and process them in Dimensions
for (int ii=0;ii<folders.length; ii++) {
if (!bRet)
break;
String folderN = folders[ii];
File fileName = new File(folderN);
FilePath dname = new FilePath(fileName);
Logger.Debug("Looking for changes in '" + folderN + "'...");
// Checkout the folder
bRet = dmSCM.createChangeSetLogs(key,getProject(),dname,
lastBuildCal,nowDateCal,
changelogFile, tz,
jobWebUrl,
baseline,requests);
}
// Close the changes log file
{
FileWriter logFile = null;
try {
logFile = new FileWriter(changelogFile,true);
PrintWriter fmtWriter = new PrintWriter(logFile);
fmtWriter.println("</changelog>");
logFile.flush();
bRet=true;
} catch (Exception e) {
throw new IOException("Unable to write change log - " + e.getMessage());
} finally {
logFile.close();
}
}
}
}
catch(Exception e)
{
String errMsg = e.getMessage();
if (errMsg == null) {
errMsg = "An unknown error occurred. Please try the operation again.";
}
listener.fatalError("Unable to run checkout callout - " + errMsg);
// e.printStackTrace();
//throw new IOException("Unable to run checkout callout - " + e.getMessage());
bRet = false;
}
finally
{
dmSCM.logout(key);
}
return bRet;
}
| private boolean generateChangeSet(final AbstractBuild build, final BuildListener listener,
final File changelogFile)
throws IOException, InterruptedException {
long key = -1;
boolean bRet = false;
DimensionsAPI dmSCM = new DimensionsAPI();
try
{
// When are we building files for?
// Looking for the last successful build and then going forward from there - could use the last build as well
//
// Calendar lastBuildCal = (build.getPreviousBuild() != null) ? build.getPreviousBuild().getTimestamp() : null;
Calendar lastBuildCal = (build.getPreviousNotFailedBuild() != null) ? build.getPreviousNotFailedBuild().getTimestamp() : null;
Calendar nowDateCal = Calendar.getInstance();
TimeZone tz = (getJobTimeZone() != null && getJobTimeZone().length() > 0) ? TimeZone.getTimeZone(getJobTimeZone()) : TimeZone.getDefault();
if (getJobTimeZone() != null && getJobTimeZone().length() > 0)
Logger.Debug("Job timezone setting is " + getJobTimeZone());
Logger.Debug("Log updates between " + ((lastBuildCal != null) ? DateUtils.getStrDate(lastBuildCal,tz) : "0") +
" -> " + DateUtils.getStrDate(nowDateCal,tz) + " (" + tz.getID() + ")");
dmSCM.setLogger(listener.getLogger());
// Connect to Dimensions...
key = dmSCM.login(getJobUserName(),getJobPasswd(),
getJobDatabase(),getJobServer());
if (key>0)
{
Logger.Debug("Login worked.");
VariableResolver<String> myResolver = build.getBuildVariableResolver();
String baseline = myResolver.resolve("DM_BASELINE");
String requests = myResolver.resolve("DM_REQUEST");
if (baseline != null) {
baseline = baseline.trim();
baseline = baseline.toUpperCase();
}
if (requests != null) {
requests = requests.replaceAll(" ","");
requests = requests.toUpperCase();
}
Logger.Debug("Extra parameters - " + baseline + " " + requests);
String[] folders = getFolders();
if (baseline != null && baseline.length() == 0)
baseline = null;
if (requests != null && requests.length() == 0)
requests = null;
bRet = true;
// Iterate through the project folders and process them in Dimensions
for (int ii=0;ii<folders.length; ii++) {
if (!bRet)
break;
String folderN = folders[ii];
File fileName = new File(folderN);
FilePath dname = new FilePath(fileName);
Logger.Debug("Looking for changes in '" + folderN + "'...");
// Checkout the folder
bRet = dmSCM.createChangeSetLogs(key,getProject(),dname,
lastBuildCal,nowDateCal,
changelogFile, tz,
jobWebUrl,
baseline,requests);
}
// Close the changes log file
{
FileWriter logFile = null;
try {
logFile = new FileWriter(changelogFile,true);
PrintWriter fmtWriter = new PrintWriter(logFile);
fmtWriter.println("</changelog>");
logFile.flush();
bRet=true;
} catch (Exception e) {
throw new IOException("Unable to write change log - " + e.getMessage());
} finally {
logFile.close();
}
}
}
}
catch(Exception e)
{
String errMsg = e.getMessage();
if (errMsg == null) {
errMsg = "An unknown error occurred. Please try the operation again.";
}
listener.fatalError("Unable to run change set callout - " + errMsg);
// e.printStackTrace();
//throw new IOException("Unable to run change set callout - " + e.getMessage());
bRet = false;
}
finally
{
dmSCM.logout(key);
}
return bRet;
}
|
diff --git a/jtrim-gui/src/main/java/org/jtrim/swing/component/Graphics2DComponent.java b/jtrim-gui/src/main/java/org/jtrim/swing/component/Graphics2DComponent.java
index 0ad6c6b1..08c8046f 100644
--- a/jtrim-gui/src/main/java/org/jtrim/swing/component/Graphics2DComponent.java
+++ b/jtrim-gui/src/main/java/org/jtrim/swing/component/Graphics2DComponent.java
@@ -1,109 +1,109 @@
package org.jtrim.swing.component;
import java.awt.Color;
import java.awt.Graphics;
import java.awt.Graphics2D;
import java.awt.image.BufferedImage;
import javax.swing.JComponent;
/**
* Defines a convenient base class for user drawn <I>Swing</I> components. This
* class defines an abstract {@link #paintComponent2D(Graphics2D) paintComponent2D}
* in place of the inherited {@link #paintComponent(Graphics) paintComponent}
* method. The main difference between these methods is that
* {@code paintComponent2D} takes a {@code Graphics2D} object rather than a
* {@code Graphics}.
* <P>
* The thread-safety property of this component is the same as with any other
* <I>Swing</I> components. That is, instances of this class can be accessed
* only from the AWT Event Dispatch Thread after made displayable.
*
* @author Kelemen Attila
*/
@SuppressWarnings("serial")
public abstract class Graphics2DComponent extends JComponent {
    /** Fully transparent color used to clear the fallback buffer before each paint. */
    private static final Color TRANSPARENT_COLOR = new Color(0, 0, 0, 0);

    /**
     * Lazily (re)allocated ARGB buffer, used only when the {@code Graphics}
     * handed to {@link #paintComponent(Graphics)} is not a {@code Graphics2D}.
     * Cached across paints and recreated when the component is resized.
     */
    private BufferedImage fallbackImage;

    /**
     * Initializes this Graphics2DComponent.
     */
    public Graphics2DComponent() {
        this.fallbackImage = null;
    }

    /**
     * Subclasses must override this method to paint this component.
     * Implementations must honor the {@code opaque} property of this component
     * but does not need to preserve the context of the passed
     * {@code Graphics2D} object (unlike with {@code paintComponent}).
     *
     * @param g the {@code Graphics2D} to paint to. This argument cannot be
     *   {@code null}.
     */
    protected abstract void paintComponent2D(Graphics2D g);

    /**
     * Invokes {@link #paintComponent2D(Graphics2D) paintComponent2D} with a
     * {@code Graphics2D} object to draw on.
     * <P>
     * In case a {@code Graphics2D} object is passed to this method a copy of
     * the context (via {@code Graphics2D.create()}) will be created and passed
     * to {@code paintComponent2D}, otherwise a {@code BufferedImage} of the
     * type {@code BufferedImage.TYPE_INT_ARGB} will be created to draw on and
     * its {@code Graphics2D} will be passed to {@code paintComponent2D}.
     * <P>
     * This method cannot be overridden, override {@code paintComponent2D}
     * instead.
     *
     * @param g the {@code Graphics} to paint to. This argument cannot be
     *   {@code null}.
     */
    @Override
    protected final void paintComponent(Graphics g) {
        int currentWidth = getWidth();
        int currentHeight = getHeight();

        Graphics scratchGraphics = null;
        Graphics2D g2d = null;
        boolean useBufferedImage = false;
        try {
            if (g instanceof Graphics2D) {
                // Work on a copy so paintComponent2D may freely mutate the
                // context without having to restore it.
                scratchGraphics = g.create();
                if (scratchGraphics instanceof Graphics2D) {
                    g2d = (Graphics2D)scratchGraphics;
                }
                else {
                    // create() did not yield a Graphics2D: fall through to
                    // the buffered-image path below.
                    scratchGraphics.dispose();
                    scratchGraphics = null;
                }
            }
            if (g2d == null) {
                useBufferedImage = true;
                // Reuse the cached buffer unless the component was resized.
                if (fallbackImage == null
                        || fallbackImage.getWidth() != currentWidth
                        || fallbackImage.getHeight() != currentHeight) {
                    fallbackImage = new BufferedImage(currentWidth, currentHeight,
                            BufferedImage.TYPE_INT_ARGB);
                }
                g2d = fallbackImage.createGraphics();
                scratchGraphics = g2d;
                // Clear stale pixels from the previous paint.
                g2d.setColor(TRANSPARENT_COLOR);
                g2d.fillRect(0, 0, currentWidth, currentHeight);
            }
            paintComponent2D(g2d);
        } finally {
            if (scratchGraphics != null) {
                scratchGraphics.dispose();
            }
            if (useBufferedImage) {
                // Blit the off-screen result onto the real target.
                g.drawImage(fallbackImage, 0, 0, null);
            }
        }
    }
}
| true | true | protected final void paintComponent(Graphics g) {
int currentWidth = getWidth();
int currentHeight = getHeight();
Graphics scratchGraphics = null;
Graphics2D g2d = null;
boolean useBufferedImage = false;
try {
if (g instanceof Graphics2D) {
scratchGraphics = g.create();
if (scratchGraphics instanceof Graphics2D) {
g2d = (Graphics2D)scratchGraphics;
}
else {
scratchGraphics.dispose();
scratchGraphics = null;
}
}
if (g2d == null) {
useBufferedImage = true;
if (fallbackImage == null ||
fallbackImage.getWidth() != currentWidth ||
fallbackImage.getHeight() != currentHeight) {
fallbackImage = new BufferedImage(currentWidth, currentHeight,
BufferedImage.TYPE_INT_ARGB);
}
g2d = fallbackImage.createGraphics();
scratchGraphics = g2d;
g2d.setColor(TRANSPARENT_COLOR);
g2d.fillRect(0, 0, currentWidth, currentHeight);
}
paintComponent2D(g2d);
} finally {
if (scratchGraphics != null) {
scratchGraphics.dispose();
}
if (useBufferedImage) {
g.drawImage(fallbackImage, 0, 0, null);
}
}
}
| protected final void paintComponent(Graphics g) {
int currentWidth = getWidth();
int currentHeight = getHeight();
Graphics scratchGraphics = null;
Graphics2D g2d = null;
boolean useBufferedImage = false;
try {
if (g instanceof Graphics2D) {
scratchGraphics = g.create();
if (scratchGraphics instanceof Graphics2D) {
g2d = (Graphics2D)scratchGraphics;
}
else {
scratchGraphics.dispose();
scratchGraphics = null;
}
}
if (g2d == null) {
useBufferedImage = true;
if (fallbackImage == null
|| fallbackImage.getWidth() != currentWidth
|| fallbackImage.getHeight() != currentHeight) {
fallbackImage = new BufferedImage(currentWidth, currentHeight,
BufferedImage.TYPE_INT_ARGB);
}
g2d = fallbackImage.createGraphics();
scratchGraphics = g2d;
g2d.setColor(TRANSPARENT_COLOR);
g2d.fillRect(0, 0, currentWidth, currentHeight);
}
paintComponent2D(g2d);
} finally {
if (scratchGraphics != null) {
scratchGraphics.dispose();
}
if (useBufferedImage) {
g.drawImage(fallbackImage, 0, 0, null);
}
}
}
|
diff --git a/swows/src/main/java/org/swows/xmlinrdf/DomDecoder.java b/swows/src/main/java/org/swows/xmlinrdf/DomDecoder.java
index 7f92fd5..38f9c4e 100644
--- a/swows/src/main/java/org/swows/xmlinrdf/DomDecoder.java
+++ b/swows/src/main/java/org/swows/xmlinrdf/DomDecoder.java
@@ -1,1774 +1,1775 @@
/*
* Copyright (c) 2011 Miguel Ceriani
* [email protected]
* This file is part of Semantic Web Open datatafloW System (SWOWS).
* SWOWS is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of
* the License, or (at your option) any later version.
* SWOWS is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
* You should have received a copy of the GNU Affero General
* Public License along with SWOWS. If not, see <http://www.gnu.org/licenses/>.
*/
package org.swows.xmlinrdf;
import java.util.Comparator;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Map;
import java.util.Map.Entry;
import java.util.NavigableMap;
import java.util.NoSuchElementException;
import java.util.Set;
import java.util.TreeMap;
import java.util.Vector;
import org.apache.log4j.Logger;
import org.swows.graph.events.DynamicGraph;
import org.swows.graph.events.GraphUpdate;
import org.swows.graph.events.Listener;
import org.swows.runnable.RunnableContext;
import org.swows.util.GraphUtils;
import org.swows.util.Utils;
import org.swows.vocabulary.SWI;
import org.swows.vocabulary.XML;
import org.w3c.dom.Attr;
import org.w3c.dom.Comment;
import org.w3c.dom.DOMException;
import org.w3c.dom.DOMImplementation;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.NamedNodeMap;
import org.w3c.dom.NodeList;
import org.w3c.dom.ProcessingInstruction;
import org.w3c.dom.Text;
import org.w3c.dom.bootstrap.DOMImplementationRegistry;
import org.w3c.dom.events.Event;
import org.w3c.dom.events.EventListener;
import org.w3c.dom.events.EventTarget;
import com.hp.hpl.jena.graph.Graph;
import com.hp.hpl.jena.graph.Node;
import com.hp.hpl.jena.graph.Triple;
import com.hp.hpl.jena.sparql.expr.NodeValue;
import com.hp.hpl.jena.sparql.util.NodeComparator;
import com.hp.hpl.jena.util.iterator.ExtendedIterator;
import com.hp.hpl.jena.util.iterator.Map1;
import com.hp.hpl.jena.vocabulary.RDF;
import com.hp.hpl.jena.vocabulary.RDFS;
public class DomDecoder implements Listener, RunnableContext, EventListener {
private static String VOID_NAMESPACE = "http://www.swows.org/xml/no-namespace";
private static final Logger logger = Logger.getLogger(DomDecoder.class);
/**
 * Orders graph nodes for child sorting: {@code null} sorts before any
 * non-null node; two non-null nodes are compared with SPARQL value
 * ordering via {@code NodeValue.compareAlways}.
 */
private static final Comparator<Node> nodeComparator = new Comparator<Node>() {
    @Override
    public int compare(Node node1, Node node2) {
        return
            (node1 == null) ?
                ( (node2 == null) ? 0 : -1 ) :
                ( (node2 == null) ?
                    1 :
                    NodeValue.compareAlways(
                        NodeValue.makeNode(node1),
                        NodeValue.makeNode(node2)));
        // TODO check > semantics in sparql
    }
};
private static final String specialXmlNamespacesSeparator = "#";
private static final Set<String> specialXmlNamespaces = new HashSet<>(50);
static {
specialXmlNamespaces.add(VOID_NAMESPACE);
specialXmlNamespaces.add("http://www.w3.org/XML/1998/namespace");
specialXmlNamespaces.add("http://www.w3.org/2000/xmlns/");
specialXmlNamespaces.add("http://www.w3.org/1999/xhtml");
specialXmlNamespaces.add("http://www.w3.org/1999/xlink");
specialXmlNamespaces.add("http://www.w3.org/2001/XInclude");
specialXmlNamespaces.add("http://www.w3.org/1999/XSL/Format");
specialXmlNamespaces.add("http://www.w3.org/1999/XSL/Transform");
specialXmlNamespaces.add("http://www.w3.org/XSL/Transform/1.0");
specialXmlNamespaces.add("http://www.w3.org/TR/WD-xsl");
specialXmlNamespaces.add("http://icl.com/saxon");
specialXmlNamespaces.add("http://xml.apache.org/xslt");
specialXmlNamespaces.add("http://xmlns.opentechnology.org/xslt-extensions/common/");
specialXmlNamespaces.add("http://xmlns.opentechnology.org/xslt-extensions/functions");
specialXmlNamespaces.add("http://xmlns.opentechnology.org/xslt-extensions/math");
specialXmlNamespaces.add("http://xmlns.opentechnology.org/xslt-extensions/sets");
specialXmlNamespaces.add("http://www.w3.org/2001/08/xquery-operators");
specialXmlNamespaces.add("http://www.w3.org/2000/svg");
specialXmlNamespaces.add("http://www.w3.org/2001/SMIL20/");
specialXmlNamespaces.add("http://www.w3.org/TR/REC-smil");
specialXmlNamespaces.add("http://www.w3.org/1998/Math/MathML");
}
// private static String specialNamespace(String )
private DocumentReceiver docReceiver;
private DOMImplementation domImplementation;
private DynamicGraph graph;
// private Set<DomEventListener> domEventListeners;
private Map<String, Set<DomEventListener>> domEventListeners;
private Document document;
private RunnableContext updatesContext;
private Map<Node, Set<org.w3c.dom.Node>> graph2domNodeMapping = new HashMap<Node, Set<org.w3c.dom.Node>>();
private Map<Node, org.w3c.dom.Node> graph2domNodeMappingRef = new HashMap<Node, org.w3c.dom.Node>();
private Map<org.w3c.dom.Node, Node> dom2graphNodeMapping = new HashMap<org.w3c.dom.Node, Node>();
private Node docRootNode;
private Map<Element,NavigableMap<Node, Vector<org.w3c.dom.Node>>> dom2orderedByKeyChildren = new HashMap<Element, NavigableMap<Node,Vector<org.w3c.dom.Node>>>();
private Map<Element,Map<org.w3c.dom.Node, Node>> dom2childrenKeys = new HashMap<Element, Map<org.w3c.dom.Node, Node>>();
private Set<Element> dom2descendingOrder = new HashSet<Element>();
private Map<Element, Node> dom2childrenOrderProperty = new HashMap<Element, Node>();
private Map<Element, String> dom2textContent = new HashMap<Element, String>();
/**
 * Default {@code EventManager}: (un)registers the listener directly on the
 * DOM node when that node implements {@code EventTarget}, and silently
 * ignores nodes that do not.
 */
private static EventManager DEFAULT_EVENT_MANAGER =
    new EventManager() {
        public void removeEventListener(
                Node targetNode,
                org.w3c.dom.Node target, String type,
                EventListener listener, boolean useCapture) {
            if (target instanceof EventTarget)
                ((EventTarget) target).removeEventListener(type, listener, useCapture);
        }
        public void addEventListener(
                Node targetNode,
                org.w3c.dom.Node target, String type,
                EventListener listener, boolean useCapture) {
            if (target instanceof EventTarget)
                ((EventTarget) target).addEventListener(type, listener, useCapture);
        }
    };
// EventManager actually in use; presumably assigned by a constructor not
// visible in this chunk — TODO confirm it is never null when decoding.
private EventManager eventManager;// = DEFAULT_EVENT_MANAGER;
// private Map<String, Set<Element>> eventType2elements = new HashMap<String, Set<Element>>();
// private Map<Element, Set<String>> element2eventTypes = new HashMap<Element, Set<String>>();
/**
 * Registers a listener for DOM events of the given type handled by this
 * decoder (see {@code handleEvent(Event)}).
 * <p>
 * The listener map is created lazily: creation is guarded by this
 * instance's monitor, while per-type mutation is guarded by the map's own
 * monitor.
 *
 * @param eventType the DOM event type (e.g. "click") to listen for
 * @param l the listener to add
 */
public void addDomEventListener(String eventType, DomEventListener l) {
    synchronized(this) {
        if (domEventListeners == null)
            domEventListeners = new HashMap<String, Set<DomEventListener>>();
    }
    synchronized(domEventListeners) {
        // Create the per-type set on first use.
        Set<DomEventListener> domEventListenersForType = domEventListeners.get(eventType);
        if (domEventListenersForType == null) {
            domEventListenersForType = new HashSet<DomEventListener>();
            domEventListeners.put(eventType, domEventListenersForType);
        }
        domEventListenersForType.add(l);
    }
}
/**
 * Unregisters a DOM event listener previously added for the given event
 * type. Does nothing when no listener map exists, the type is unknown, or
 * the listener was never registered.
 *
 * @param eventType the DOM event type the listener was registered for
 * @param l the listener to remove
 */
public void removeDomEventListener(String eventType, DomEventListener l) {
    if (domEventListeners != null) {
        synchronized(domEventListeners) {
            // Bug fix: the map is keyed by event type, so calling
            // domEventListeners.remove(l) was a guaranteed no-op; the
            // listener must be removed from the per-type set instead.
            Set<DomEventListener> domEventListenersForType = domEventListeners.get(eventType);
            if (domEventListenersForType != null)
                domEventListenersForType.remove(l);
        }
    }
}
/**
 * {@code EventListener} callback: maps the DOM event's target and current
 * target back to their RDF graph nodes and forwards the event to every
 * {@code DomEventListener} registered for the event's type.
 *
 * @param evt the DOM event being dispatched
 */
public synchronized void handleEvent(Event evt) {
    logger.debug("In DOM decoder handling event " + evt + " of type " + evt.getType());
    org.w3c.dom.Node eventCurrentTargetDomNode = (org.w3c.dom.Node) evt.getCurrentTarget();
    Node eventCurrentTargetGraphNode = dom2graphNodeMapping.get(eventCurrentTargetDomNode);
    org.w3c.dom.Node eventTargetDomNode = (org.w3c.dom.Node) evt.getTarget();
    Node eventTargetGraphNode = dom2graphNodeMapping.get(eventTargetDomNode);
    if (domEventListeners != null) {
        synchronized (domEventListeners) {
            Set<DomEventListener> domEventListenersForType = domEventListeners.get(evt.getType());
            // Bug fix: an event whose type has no registered listeners used
            // to throw a NullPointerException here; guard against a null set.
            if (domEventListenersForType != null) {
                for (DomEventListener l : domEventListenersForType) {
                    logger.debug("Sending to " + l + " the event " + evt);
                    l.handleEvent(evt, eventCurrentTargetGraphNode, eventTargetGraphNode);
                }
            }
        }
    }
}
/**
 * Decodes the XML document rooted at {@code docRootNode} using a fresh
 * "XML 1.0" {@code DOMImplementation} from the registry.
 *
 * @param graph the dynamic RDF graph encoding the document
 * @param docRootNode the RDF node of the document root
 * @return the decoded DOM document
 * @throws RuntimeException wrapping any reflective failure while
 *   bootstrapping the DOM implementation registry
 */
public static Document decode(DynamicGraph graph, Node docRootNode) {
    try {
        return decode(graph, docRootNode,
                DOMImplementationRegistry.newInstance().getDOMImplementation("XML 1.0"));
    } catch (IllegalAccessException | ClassNotFoundException | InstantiationException e) {
        // Idiom: the three catch arms were identical — collapse to multi-catch.
        throw new RuntimeException(e);
    }
}
/**
 * Decodes the first document found in the graph.
 *
 * @throws java.util.NoSuchElementException when the graph contains no
 *   node typed as {@code xml:Document} (from the {@code next()} call)
 */
public static Document decodeOne(DynamicGraph graph) {
    return decodeAll(graph).next();
}

/**
 * Lazily decodes every node typed as {@code xml:Document} in the graph,
 * one DOM {@code Document} per match.
 */
public static ExtendedIterator<Document> decodeAll(final DynamicGraph graph) {
    return graph
        .find(Node.ANY, RDF.type.asNode(), XML.Document.asNode())
        .mapWith(new Map1<Triple, Document>() {
            public Document map1(Triple triple) {
                return decode(graph, triple.getSubject());
            }
        });
}
/**
 * Extracts the local name encoded after the "#" separator when the URI's
 * namespace part is one of the well-known XML namespaces; returns
 * {@code null} for non-URI nodes or non-special namespaces.
 */
private static String specialName(final Node elementNode) {
    if (!elementNode.isURI())
        return null;
    final String uri = elementNode.getURI();
    final int sep = uri.indexOf(specialXmlNamespacesSeparator);
    if (sep <= 0 || !specialXmlNamespaces.contains(uri.substring(0, sep)))
        return null;
    return uri.substring(sep + 1);
}
/**
 * Extracts the namespace part before the "#" separator when it is one of
 * the well-known XML namespaces; returns {@code null} for non-URI nodes
 * or non-special namespaces.
 */
private static String specialNamespace(final Node elementNode) {
    if (!elementNode.isURI())
        return null;
    final String uri = elementNode.getURI();
    final int sep = uri.indexOf(specialXmlNamespacesSeparator);
    if (sep <= 0 || !specialXmlNamespaces.contains(uri.substring(0, sep)))
        return null;
    return uri.substring(0, sep);
}
/**
 * Resolves the XML qualified name (tag name) of an element: either the
 * name embedded in the element's rdf:type URI (special-namespace
 * encoding), or the type's {@code xml:nodeName} literal.
 *
 * @param graph the RDF graph encoding the XML document
 * @param elementNode the RDF node representing the XML element
 * @return the element's qualified name
 */
private static String qNameElement(final Graph graph, final Node elementNode) {
    Node elementType = GraphUtils.getSingleValueProperty(graph, elementNode, RDF.type.asNode());
    // Types under a well-known XML namespace embed the local name after "#".
    String specialName = specialName(elementType);
    if (specialName != null)
        return specialName;
    return
        GraphUtils
            .getSingleValueProperty(
                graph,
                elementType,
                XML.nodeName.asNode() )
            .getLiteralLexicalForm();
}
/**
 * Resolves the namespace URI of an element: either the namespace embedded
 * in the element's rdf:type URI (special-namespace encoding), or the
 * type's {@code xml:namespace} property.
 *
 * @return the namespace URI, or {@code null} when none is declared
 */
private static String namespaceElement(final Graph graph, final Node elementNode) {
    Node elementType = GraphUtils.getSingleValueProperty(graph, elementNode, RDF.type.asNode());
    String specialNamespace = specialNamespace(elementType);
    if (specialNamespace != null)
        return specialNamespace;
    try {
        return
            graph.find(
                elementType,
                XML.namespace.asNode(),
                Node.ANY)
                .next().getObject().getURI();
    } catch (NoSuchElementException e) {
        // No xml:namespace triple: element has no declared namespace.
        return null;
    }
}
/**
 * Decides whether a predicate denotes an XML attribute: true for URIs in
 * one of the well-known XML namespaces, or when the graph declares the
 * predicate a subclass of {@code xml:Attr}.
 */
private static boolean predicateIsAttr(final Graph graph, final Node predicate) {
    if (predicate.isURI()) {
        final String uri = predicate.getURI();
        final int sep = uri.indexOf(specialXmlNamespacesSeparator);
        if (sep > 0 && specialXmlNamespaces.contains(uri.substring(0, sep)))
            return true;
    }
    return graph.contains(predicate, RDFS.subClassOf.asNode(), XML.Attr.asNode());
}
/**
 * True when {@code node} is typed as {@code xml:Document} AND is linked
 * from the graph root via {@code xml:document}.
 */
private static boolean nodeIsRootDocument(final Graph graph, Node nodeType, Node node) {
    if (!nodeType.equals(XML.Document.asNode()))
        return false;
    return graph.contains(SWI.GraphRoot.asNode(), XML.document.asNode(), node);
}
/**
 * Decides whether an rdf:type denotes an XML element type: true for URIs
 * in one of the well-known XML namespaces, or when the graph declares the
 * type a subclass of {@code xml:Element}.
 */
private static boolean nodeTypeIsElementType(final Graph graph, final Node nodeType) {
    if (nodeType.isURI()) {
        final String uri = nodeType.getURI();
        final int sep = uri.indexOf(specialXmlNamespacesSeparator);
        if (sep > 0 && specialXmlNamespaces.contains(uri.substring(0, sep)))
            return true;
    }
    return graph.contains(nodeType, RDFS.subClassOf.asNode(), XML.Element.asNode());
}
/**
 * Resolves the qualified name of an attribute: either the name embedded
 * in the attribute's own URI (special-namespace encoding — note: unlike
 * {@code qNameElement}, this works on the node itself, not its rdf:type),
 * or the node's {@code xml:nodeName} literal.
 */
private static String qNameAttr(final Graph graph, final Node elementNode) {
    String specialName = specialName(elementNode);
    if (specialName != null)
        return specialName;
    return
        GraphUtils
            .getSingleValueProperty(
                graph,
                elementNode,
                XML.nodeName.asNode() )
            .getLiteralLexicalForm();
}
/**
 * Resolves the namespace URI of an attribute: either the namespace
 * embedded in the attribute's own URI (special-namespace encoding), or
 * the node's {@code xml:namespace} property.
 *
 * @return the namespace URI, or {@code null} when none is declared
 */
private static String namespaceAttr(final Graph graph, final Node elementNode) {
    String specialNamespace = specialNamespace(elementNode);
    if (specialNamespace != null)
        return specialNamespace;
    try {
        return
            graph.find(
                elementNode,
                XML.namespace.asNode(),
                Node.ANY)
                .next().getObject().getURI();
    } catch (NoSuchElementException e) {
        // No xml:namespace triple: attribute has no declared namespace.
        return null;
    }
}
/**
 * Reads the {@code xml:nodeValue} literal attached to a node.
 *
 * @return the lexical form of the value, or {@code null} when absent
 */
private static String value(final Graph graph, final Node elementNode) {
    try {
        return
            graph.find(elementNode, XML.nodeValue.asNode(), Node.ANY)
                .next().getObject().getLiteralLexicalForm();
    } catch (NoSuchElementException e) {
        // Missing value triple is treated as "no value", not an error.
        return null;
    }
}
/** Creates a DOM text node from the RDF node's {@code xml:nodeValue}. */
private Text decodeText(Graph graph, Node elementNode) {
    return document.createTextNode(value(graph, elementNode));
}

/** Creates a DOM comment node from the RDF node's {@code xml:nodeValue}. */
@SuppressWarnings("unused")
private Comment decodeComment(Graph graph, Node elementNode) {
    return document.createComment(value(graph, elementNode));
}

/**
 * Creates a DOM processing instruction: the target comes from the node's
 * element name, the data from its {@code xml:nodeValue}.
 */
@SuppressWarnings("unused")
private ProcessingInstruction decodeProcessingInstruction(Graph graph, Node elementNode) {
    return document.createProcessingInstruction(
        qNameElement(graph, elementNode),
        value(graph, elementNode) );
}
/**
 * Creates (but does not attach) a DOM attribute for the given RDF node.
 * Attributes in the synthetic {@code VOID_NAMESPACE} are created without
 * a namespace; all others are namespace-qualified.
 *
 * @throws RuntimeException when no namespace can be resolved
 */
private Attr decodeAttr(Node elementNode) {
    String nsUri = namespaceAttr(graph, elementNode);
    if (nsUri == null)
        throw new RuntimeException("Namespace not found for attribute " + elementNode + " in graph " + graph);
    return
        (nsUri.equals(VOID_NAMESPACE))
            ? document.createAttribute( qNameAttr(graph, elementNode) )
            : document.createAttributeNS(
                nsUri,
                qNameAttr(graph, elementNode) );
}
/**
 * Removes from {@code element} the attribute described by the RDF node,
 * mirroring the namespace handling of {@code decodeAttr}.
 *
 * @throws RuntimeException when no namespace can be resolved
 */
private void removeAttr(Element element, Node elementNode) {
    String nsUri = namespaceAttr(graph, elementNode);
    if (nsUri == null)
        throw new RuntimeException("Namespace not found for attribute " + elementNode + " in graph " + graph);
    if (nsUri.equals(VOID_NAMESPACE))
        element.removeAttribute( qNameAttr(graph, elementNode) );
    else
        element.removeAttributeNS(
            nsUri,
            qNameAttr(graph, elementNode) );
}
/**
 * Computes the lowercase hex-encoded MD5 digest of the given string.
 * <p>
 * NOTE(review): the input is encoded with {@code String.getBytes()},
 * i.e. the platform default charset; kept for backward compatibility,
 * but confirm callers never pass non-ASCII text across platforms.
 *
 * @param md5 the string to digest
 * @return the 32-character hex digest
 */
public String md5(String md5) {
    try {
        java.security.MessageDigest md = java.security.MessageDigest.getInstance("MD5");
        byte[] array = md.digest(md5.getBytes());
        StringBuilder sb = new StringBuilder(array.length * 2);
        for (byte b : array) {
            sb.append(String.format("%02x", b));
        }
        return sb.toString();
    } catch (java.security.NoSuchAlgorithmException e) {
        // Every conformant JRE is required to provide MD5 (MessageDigest
        // spec), so this is unreachable; fail loudly instead of silently
        // returning null as the previous version did.
        throw new IllegalStateException("MD5 algorithm not available", e);
    }
}
/**
 * Re-appends the element's children in order-key order (ascending or
 * descending) after re-applying any plain-text content recorded in
 * {@code dom2textContent}. Children come from the per-element ordered map
 * maintained by {@code putChildByKey}/{@code updateChildByKey}.
 */
private void reorder(
//      Set<org.w3c.dom.Node> noKeyChildren,
        boolean ascendingOrder,
        Element element,
        NavigableMap<Node, Vector<org.w3c.dom.Node>> orderedByKeyChildren ) {
//  if (ascendingOrder) {
//      for (org.w3c.dom.Node child : noKeyChildren) {
//          element.appendChild(child);
//      }
//  }
    // Text content is restored first; the appendChild calls below then
    // (re)attach the element children after it.
    String textContent = dom2textContent.get(element);
    if (textContent != null)
        element.setTextContent(textContent);
    for (Node orderKeyNode : ascendingOrder ? orderedByKeyChildren.keySet() : orderedByKeyChildren.descendingKeySet() ) {
        for (org.w3c.dom.Node child : orderedByKeyChildren.get(orderKeyNode)) {
            // appendChild moves an already-attached node, so iterating in
            // key order re-sorts the children in place.
            element.appendChild(child);
        }
    }
//  if (!ascendingOrder) {
//      for (org.w3c.dom.Node child : noKeyChildren) {
//          element.appendChild(child);
//      }
//  }
}
// Monotonically increasing counter backing newElementId().
private int elementCount = 0;

/** Returns a fresh id of the form "n_&lt;hex counter&gt;", unique per decoder. */
private synchronized String newElementId() {
    return "n_" + Integer.toHexString(elementCount++);
}
/**
 * Registers {@code node} as a child of {@code parent} under the given
 * order key, in both the key-&gt;children ordered map and the reverse
 * child-&gt;key map.
 */
private void putChildByKey(Element parent, org.w3c.dom.Node node, Node orderKeyNode) {
    Map<org.w3c.dom.Node, Node> childrenKeys = dom2childrenKeys.get(parent);
    NavigableMap<Node, Vector<org.w3c.dom.Node>> orderedByKeyChildren = dom2orderedByKeyChildren.get(parent);
    // Children sharing the same key are grouped in one insertion-ordered bag.
    Vector<org.w3c.dom.Node> sameKeyBag = orderedByKeyChildren.get(orderKeyNode);
    if (sameKeyBag == null) {
        sameKeyBag = new Vector<org.w3c.dom.Node>();
        orderedByKeyChildren.put(orderKeyNode, sameKeyBag);
    }
    sameKeyBag.add(node);
    childrenKeys.put(node, orderKeyNode);
}
/**
 * Unregisters {@code node} from {@code parent}'s ordering structures:
 * removes it from its key's bag (dropping the bag when emptied) and from
 * the reverse child-&gt;key map.
 */
private void removeChildByKey(Element parent, org.w3c.dom.Node node) {
    Map<org.w3c.dom.Node, Node> childrenKeys = dom2childrenKeys.get(parent);
    NavigableMap<Node, Vector<org.w3c.dom.Node>> orderedByKeyChildren = dom2orderedByKeyChildren.get(parent);
    Node prevOrderKeyNode = childrenKeys.get(node);
    if (prevOrderKeyNode != null) {
        Vector<org.w3c.dom.Node> oldKeyBag = orderedByKeyChildren.get(prevOrderKeyNode);
        oldKeyBag.remove(node);
        if (oldKeyBag.isEmpty())
            orderedByKeyChildren.remove(prevOrderKeyNode);
    }
    childrenKeys.remove(node);
}
/**
 * Moves {@code node} from its previous order key (if any) to
 * {@code orderKeyNode} in {@code parent}'s ordering structures.
 * Note: {@code orderKeyNode} may be null — the map's comparator
 * ({@code nodeComparator}) handles null keys, sorting them first.
 */
private void updateChildByKey(Element parent, org.w3c.dom.Node node, Node orderKeyNode) {
    Map<org.w3c.dom.Node, Node> childrenKeys = dom2childrenKeys.get(parent);
    NavigableMap<Node, Vector<org.w3c.dom.Node>> orderedByKeyChildren = dom2orderedByKeyChildren.get(parent);
    // Detach from the old key's bag, if the node was already registered.
    Node prevOrderKeyNode = childrenKeys.get(node);
    Vector<org.w3c.dom.Node> oldKeyBag = orderedByKeyChildren.get(prevOrderKeyNode);
    if (oldKeyBag != null) {
        oldKeyBag.remove(node);
        if (oldKeyBag.isEmpty())
            orderedByKeyChildren.remove(prevOrderKeyNode);
    }
    // Attach under the new key.
    Vector<org.w3c.dom.Node> sameKeyBag = orderedByKeyChildren.get(orderKeyNode);
    if (sameKeyBag == null) {
        sameKeyBag = new Vector<org.w3c.dom.Node>();
        orderedByKeyChildren.put(orderKeyNode, sameKeyBag);
    }
    sameKeyBag.add(node);
    childrenKeys.put(node, orderKeyNode);
}
/**
 * Decodes and orders the children of an element: reads the optional
 * ordering property ({@code xml:childrenOrderedBy}) and direction
 * ({@code xml:childrenOrderType}), (re)initializes the element's ordering
 * structures, decodes every {@code xml:hasChild}, collects any
 * {@code xml:textContent} literals, and finally re-appends everything in
 * key order via {@code reorder}.
 */
private void setupElementChildren(Node elementNode, Element element) {
    // Per-element property by which children are keyed (falls back to
    // xml:orderKey per child, below).
    Node childrenOrderProperty =
        GraphUtils.getSingleValueOptProperty(graph, elementNode, XML.childrenOrderedBy.asNode());
    if (childrenOrderProperty != null)
        dom2childrenOrderProperty.put(element, childrenOrderProperty);
    boolean ascendingOrder =
        !graph.contains(elementNode, XML.childrenOrderType.asNode(), XML.Descending.asNode());
    if (!ascendingOrder)
        dom2descendingOrder.add(element);
    ExtendedIterator<Node> children = GraphUtils.getPropertyValues(graph, elementNode, XML.hasChild.asNode());
    {
        // Fresh ordering structures for this element.
        NavigableMap<Node, Vector<org.w3c.dom.Node>> orderedByKeyChildren =
            new TreeMap<Node, Vector<org.w3c.dom.Node>>(nodeComparator);
        Map<org.w3c.dom.Node, Node> childrenKeys =
            new HashMap<org.w3c.dom.Node, Node>();
        dom2orderedByKeyChildren.put(element, orderedByKeyChildren);
        dom2childrenKeys.put(element, childrenKeys);
    }
//  Set<org.w3c.dom.Node> noKeyChildren = new HashSet<org.w3c.dom.Node>();
    while (children.hasNext()) {
        Node child = children.next();
//      if (!orderedChildren.contains(child)) {
            org.w3c.dom.Node domChild = decodeNode(child);
            if (domChild != null) {
//              addNodeMapping(child, domChild);
                // Key per child: the element-level ordering property when
                // set, otherwise the child's own xml:orderKey (may be null).
                Node orderKeyNode =
                    ( childrenOrderProperty != null ) ?
                        GraphUtils.getSingleValueOptProperty(graph, child, childrenOrderProperty) :
                        GraphUtils.getSingleValueOptProperty(graph, child, XML.orderKey.asNode());
//              if (orderKeyNode == null) {
////                    noKeyChildren.add(domChild);
//                  orderKeyNode = Node.NULL;
//              } //else {
                    updateChildByKey(element, domChild, orderKeyNode);
//                  Vector<org.w3c.dom.Node> sameKeyBag = orderedByKeyChildren.get(orderKeyNode);
//                  if (sameKeyBag == null) {
//                      sameKeyBag = new Vector<org.w3c.dom.Node>();
//                      orderedByKeyChildren.put(orderKeyNode, sameKeyBag);
//                  }
//                  sameKeyBag.add(domChild);
//              }
            }
//      }
    }
    // Concatenate all xml:textContent literals; clear stale text when the
    // property disappeared since the last decode.
    Iterator<Node> textContentNodes = GraphUtils.getPropertyValues(graph, elementNode, XML.textContent.asNode());
    if (textContentNodes.hasNext()) {
        String textContent = "";
        while (textContentNodes.hasNext()) {
            Node textContentNode = textContentNodes.next();
            if (textContentNode.isLiteral())
                textContent += textContentNode.getLiteralLexicalForm();
        }
        dom2textContent.put(element, textContent);
    } else if (dom2textContent.containsKey(element)) {
        dom2textContent.remove(element);
        element.setTextContent("");
    }
    reorder(/*noKeyChildren, */ascendingOrder, element, dom2orderedByKeyChildren.get(element));
}
/**
 * Decodes and sets an attribute on {@code element}. A literal object
 * becomes the attribute value directly; a URI object is resolved: when it
 * decodes to an element of this document the value becomes a local
 * fragment reference ("#" + that element's id), otherwise the raw URI.
 */
private void setAttr(Element element, Node attrNode, Node object) {
    Attr attr = decodeAttr(attrNode);
    if (object.isLiteral())
        attr.setValue(object.getLiteralLexicalForm());
    else if (object.isURI()) {
        // forRef=true: reuse any already-decoded element for this node.
        org.w3c.dom.Node domNode = decodeIfNode(object,true);
        if (domNode != null && (domNode instanceof Element)) {
            attr.setValue("#" + ((Element) domNode).getAttribute("id"));
        } else {
            attr.setValue(object.getURI());
        }
    }
    element.setAttributeNodeNS(attr);
}
/**
 * Populates a freshly created element: decodes every attribute predicate,
 * assigns "resource"/"id" attributes for URI-named elements, registers
 * this decoder as DOM event listener for every
 * {@code xml:listenedEventType}, and finally decodes and orders the
 * children via {@code setupElementChildren}.
 */
private void decodeElementAttrsAndChildren(final Element element, final Node elementNode) {
    // Every outgoing triple whose predicate is an attribute becomes a
    // DOM attribute on this element.
    ExtendedIterator<Triple> triples =
        graph.find(elementNode, Node.ANY, Node.ANY);
    while (triples.hasNext()) {
        Triple t = triples.next();
        if ( predicateIsAttr(graph, t.getPredicate()) ) {
            setAttr(element, t.getPredicate(), t.getObject());
        }
    }
    if (elementNode.isURI()) {
        // Record the RDF identity on the element; the generated id makes
        // the element referenceable from attribute values (see setAttr).
        element.setAttribute("resource", elementNode.getURI());
        if (!element.hasAttribute("id"))
            element.setAttribute("id", newElementId());
//      element.setAttribute("id", elementNode.getURI());
    }
//  Set<Node> orderedChildren = new HashSet<Node>();
//  {
//      Node child = GraphUtils.getSingleValueOptProperty(graph, elementNode, XML.firstChild.asNode());
//      while (child != null) {
//          orderedChildren.add(child);
//          org.w3c.dom.Node newChild = decodeNode(graph, child);
//          if (newChild != null) {
//              addNodeMapping(child, newChild);
//              element.appendChild(newChild);
//          }
//          child = GraphUtils.getSingleValueOptProperty(graph, child, XML.nextSibling.asNode());
//      }
//  }
//  System.out.println("Looking for eventListeners in element " + element + " (" + elementNode + ")");
    // Register this decoder for each event type the graph declares for
    // this element; events are then fanned out in handleEvent().
    Iterator<Node> eventTypeNodes = GraphUtils.getPropertyValues(graph, elementNode, XML.listenedEventType.asNode());
    while (eventTypeNodes.hasNext()) {
        Node eventTypeNode = eventTypeNodes.next();
        if (eventTypeNode.isLiteral()) {
            String eventType = eventTypeNode.getLiteralLexicalForm();
//          System.out.println("Registering eventListener for type " + eventTypeNode.getLiteralLexicalForm() + " in element " + element + " (" + elementNode + ")");
//          ((EventTarget) element).addEventListener(eventType, this, false);
            logger.trace("Calling addEventListener() for new node");
            eventManager.addEventListener(elementNode, element, eventType, this, false);
//          Set<Element> elemsForEventType = eventType2elements.get(eventType);
//          if (elemsForEventType == null) {
//              elemsForEventType = new HashSet<Element>();
//              eventType2elements.put(eventType, elemsForEventType);
//              ((EventTarget) document).addEventListener(eventType, this, false);
//          }
//          elemsForEventType.add(element);
//
//          Set<String> eventTypesForElement = element2eventTypes.get(element);
//          if (eventTypesForElement == null) {
//              eventTypesForElement = new HashSet<String>();
//              element2eventTypes.put(element, eventTypesForElement);
//          }
//          eventTypesForElement.add(eventType);
        }
    }
    setupElementChildren(elementNode, element);
}
/**
 * Creates a DOM element for the given RDF node, registers the graph-to-DOM
 * mapping (reference-only or full, depending on {@code forRef}) and then
 * decodes its attributes and children.
 * NOTE(review): the mapping is registered before children are decoded —
 * presumably so self/cyclic references resolve; confirm.
 */
private Element decodeElement(final Node elementNode, boolean forRef) {
    Element element =
        document.createElementNS(
            namespaceElement(graph, elementNode),
            qNameElement(graph, elementNode) );
    if (forRef)
        addNodeRefMapping(elementNode, element);
    else
        addNodeMapping(elementNode, element);
    decodeElementAttrsAndChildren(element, elementNode);
    return element;
}
/**
 * Decodes the RDF node into a DOM node if its rdf:type identifies one (an
 * element type or {@code xml:Text}); returns {@code null} otherwise.
 * <p>
 * {@code forRef} distinguishes decoding for an attribute reference from
 * decoding for actual insertion into the tree: a node previously decoded
 * only for reference is promoted to the full mapping when requested with
 * {@code forRef == false}.
 */
private org.w3c.dom.Node decodeIfNode(Node elementNode, boolean forRef) {
    org.w3c.dom.Node domNode = graph2domNodeMappingRef.get(elementNode);
    if (domNode != null) {
        if (!forRef) {
            // Promote the reference-only mapping to the full mapping.
            Set<org.w3c.dom.Node> domeNodeSet = graph2domNodeMapping.get(elementNode);
            if (domeNodeSet == null) {
                domeNodeSet = new HashSet<org.w3c.dom.Node>();
                graph2domNodeMapping.put(elementNode, domeNodeSet);
            }
            domeNodeSet.add(domNode);
            graph2domNodeMappingRef.remove(elementNode);
        }
        return domNode;
    }
    if (forRef) {
        // For a reference, any already-decoded instance will do.
        Set<org.w3c.dom.Node> domNodeSet = graph2domNodeMapping.get(elementNode);
        if (domNodeSet != null && !domNodeSet.isEmpty())
            return domNodeSet.iterator().next();
    }
    // Decode according to the first recognized rdf:type.
    ExtendedIterator<Node> nodeTypes = GraphUtils.getPropertyValues(graph, elementNode, RDF.type.asNode());
    Node nodeType;
    while(nodeTypes.hasNext()) {
        nodeType = nodeTypes.next();
        if ( nodeTypeIsElementType(graph, nodeType) )
            return decodeElement(elementNode, forRef);
        if ( nodeType.equals( XML.Text.asNode() ) )
            return decodeText(graph, elementNode);
    }
    return null;
}
/**
 * Decodes the RDF node into a DOM node, failing when its rdf:type does
 * not identify one.
 *
 * @throws RuntimeException when no DOM type can be derived for the node
 */
private org.w3c.dom.Node decodeNode(Node elementNode) {
    final org.w3c.dom.Node decoded = decodeIfNode(elementNode, false);
    if (decoded == null)
        throw new RuntimeException("DOM Type not found for node " + elementNode);
    return decoded;
}
/**
 * Builds the DOM {@code Document} from the given document root node: each
 * {@code xml:hasChild} of the root is treated as a candidate document
 * element. Note: when several candidates exist, each loop iteration
 * replaces {@code this.document}, so the last one wins.
 */
private void decodeDocument(Node docRootNode) {
    Iterator<Node> possibleDocs =
        GraphUtils.getPropertyValues(graph, docRootNode, XML.hasChild.asNode());
    while (possibleDocs.hasNext()) {
//      try {
            Node elementNode = possibleDocs.next();
            document =
                domImplementation.createDocument(
                    namespaceElement(graph, elementNode),
                    qNameElement(graph, elementNode),
                    null);
            if (docRootNode.isURI())
                document.setDocumentURI(docRootNode.getURI());
            Element docElement = document.getDocumentElement();
            // Map both the root (to the Document) and the document element.
            addNodeMapping(docRootNode, document);
            addNodeMapping(elementNode, docElement);
            decodeElementAttrsAndChildren( docElement, elementNode );
//      } catch(RuntimeException e) { }
    }
}
/**
 * Discards all graph&lt;-&gt;DOM mappings and decodes the document again
 * from scratch, then pushes the fresh {@code Document} to the receiver,
 * if one is set.
 */
private void redecodeDocument(Node docRootNode) {
    graph2domNodeMapping = new HashMap<Node, Set<org.w3c.dom.Node>>();
    graph2domNodeMappingRef = new HashMap<Node, org.w3c.dom.Node>();
    dom2graphNodeMapping = new HashMap<org.w3c.dom.Node, Node>();
//  eventType2elements = new HashMap<String, Set<Element>>();
//  element2eventTypes = new HashMap<Element, Set<String>>();
    decodeDocument(docRootNode);
    if (docReceiver != null)
        docReceiver.sendDocument(document);
}

/**
 * Redecodes from the first document root found in the graph: the object
 * of (swi:GraphRoot xml:document ?doc), or failing that any subject typed
 * as {@code xml:Document}.
 * Throws {@code NoSuchElementException} (from {@code next()}) when the
 * graph contains neither.
 */
private void redecodeDocument() {
    redecodeDocument(
        graph
            .find(SWI.GraphRoot.asNode(), XML.document.asNode(), Node.ANY)
            .mapWith( new Map1<Triple, Node>() {
                public Node map1(Triple t) {
                    return t.getObject();
                }
            })
            .andThen(
                graph
                    .find(Node.ANY, RDF.type.asNode(), XML.Document.asNode())
                    .mapWith( new Map1<Triple, Node>() {
                        public Node map1(Triple t) {
                            return t.getSubject();
                        }
                    }) ).next());
}
/**
 * Decodes the document and then registers this decoder as a listener of
 * the dynamic graph, so subsequent graph updates reach this instance.
 */
private void decodeWorker(DynamicGraph graph, Node docRootNode) {
    logger.debug("decoding document from " + Utils.standardStr(graph) + " ...");
    decodeDocument(docRootNode);
    logger.debug("registering as listener of " + Utils.standardStr(graph) + " ...");
    graph.getEventManager2().register(this);
}
/*
 * Convenience decode(...) overloads: each constructs a DomDecoder with the
 * given combination of DOMImplementation, RunnableContext,
 * DocumentReceiver and EventManager, and returns its decoded Document.
 */
public static Document decode(
        DynamicGraph graph, Node docRootNode,
        DOMImplementation domImpl) {
    return (new DomDecoder(graph, docRootNode, domImpl)).getDocument();
}

public static Document decode(
        DynamicGraph graph, Node docRootNode,
        DOMImplementation domImpl,
        EventManager eventManager) {
    return (new DomDecoder(graph, docRootNode, domImpl, eventManager)).getDocument();
}

public static Document decode(
        DynamicGraph graph, Node docRootNode,
        DOMImplementation domImpl, RunnableContext updatesContext) {
    return (new DomDecoder(graph, docRootNode, domImpl, updatesContext)).getDocument();
}

public static Document decode(
        DynamicGraph graph, Node docRootNode,
        DOMImplementation domImpl, RunnableContext updatesContext,
        EventManager eventManager) {
    return (new DomDecoder(graph, docRootNode, domImpl, updatesContext, eventManager)).getDocument();
}

public static Document decode(
        DynamicGraph graph, Node docRootNode,
        DOMImplementation domImpl, DocumentReceiver docReceiver) {
    return (new DomDecoder(graph, docRootNode, domImpl, docReceiver)).getDocument();
}

public static Document decode(
        DynamicGraph graph, Node docRootNode,
        DOMImplementation domImpl, DocumentReceiver docReceiver,
        EventManager eventManager) {
    return (new DomDecoder(graph, docRootNode, domImpl, docReceiver, eventManager)).getDocument();
}

public static Document decode(
        DynamicGraph graph, Node docRootNode,
        DOMImplementation domImpl, RunnableContext updatesContext,
        DocumentReceiver docReceiver) {
    return (new DomDecoder(graph, docRootNode, domImpl, updatesContext, docReceiver)).getDocument();
}

public static Document decode(
        DynamicGraph graph, Node docRootNode,
        DOMImplementation domImpl, RunnableContext updatesContext,
        DocumentReceiver docReceiver,
        EventManager eventManager) {
    return (new DomDecoder(graph, docRootNode, domImpl, updatesContext, docReceiver, eventManager)).getDocument();
}
public static Document decode(
DynamicGraph graph, Node docRootNode,
DOMImplementation domImpl,
Map<String,Set<DomEventListener>> domEventListeners) {
DomDecoder domDecoder = new DomDecoder(graph, docRootNode, domImpl);
if (domEventListeners != null)
for (String eventType : domEventListeners.keySet())
for (DomEventListener listener : domEventListeners.get(eventType))
domDecoder.addDomEventListener(eventType, listener);
return domDecoder.getDocument();
}
public static Document decode(
DynamicGraph graph, Node docRootNode,
DOMImplementation domImpl,
Map<String,Set<DomEventListener>> domEventListeners,
EventManager eventManager) {
DomDecoder domDecoder = new DomDecoder(graph, docRootNode, domImpl, eventManager);
if (domEventListeners != null)
for (String eventType : domEventListeners.keySet())
for (DomEventListener listener : domEventListeners.get(eventType))
domDecoder.addDomEventListener(eventType, listener);
return domDecoder.getDocument();
}
/**
 * Decodes the XML document rooted at {@code docRootNode} with an explicit
 * updates context, registering the given DOM event listeners on the decoder.
 *
 * @param graph             dynamic graph holding the XML description
 * @param docRootNode       graph node representing the document root
 * @param domImpl           DOM implementation used to create the document
 * @param updatesContext    context in which graph-update handling is run (may be null)
 * @param domEventListeners listeners keyed by event type (may be null for none)
 * @return the decoded DOM document
 */
public static Document decode(
        DynamicGraph graph, Node docRootNode,
        DOMImplementation domImpl, RunnableContext updatesContext,
        Map<String,Set<DomEventListener>> domEventListeners) {
    DomDecoder domDecoder = new DomDecoder(graph, docRootNode, domImpl, updatesContext);
    // Iterate entrySet() rather than keySet()+get(): one map lookup per entry.
    if (domEventListeners != null)
        for (Entry<String, Set<DomEventListener>> entry : domEventListeners.entrySet())
            for (DomEventListener listener : entry.getValue())
                domDecoder.addDomEventListener(entry.getKey(), listener);
    return domDecoder.getDocument();
}
/**
 * Same as {@link #decode(DynamicGraph, Node, DOMImplementation, RunnableContext, Map)}
 * but with an explicit {@code eventManager} instead of the default one.
 */
public static Document decode(
        DynamicGraph graph, Node docRootNode,
        DOMImplementation domImpl, RunnableContext updatesContext,
        Map<String,Set<DomEventListener>> domEventListeners,
        EventManager eventManager) {
    DomDecoder domDecoder = new DomDecoder(graph, docRootNode, domImpl, updatesContext, eventManager);
    if (domEventListeners != null)
        for (Entry<String, Set<DomEventListener>> entry : domEventListeners.entrySet())
            for (DomEventListener listener : entry.getValue())
                domDecoder.addDomEventListener(entry.getKey(), listener);
    return domDecoder.getDocument();
}
/**
 * Decodes the XML document rooted at {@code docRootNode} with a document
 * receiver, registering the given DOM event listeners on the decoder.
 *
 * @param graph             dynamic graph holding the XML description
 * @param docRootNode       graph node representing the document root
 * @param domImpl           DOM implementation used to create the document
 * @param docReceiver       receiver for the created document (may be null)
 * @param domEventListeners listeners keyed by event type (may be null for none)
 * @return the decoded DOM document
 */
public static Document decode(
        DynamicGraph graph, Node docRootNode,
        DOMImplementation domImpl, DocumentReceiver docReceiver,
        Map<String,Set<DomEventListener>> domEventListeners) {
    DomDecoder domDecoder = new DomDecoder(graph, docRootNode, domImpl, docReceiver);
    // Iterate entrySet() rather than keySet()+get(): one map lookup per entry.
    if (domEventListeners != null)
        for (Entry<String, Set<DomEventListener>> entry : domEventListeners.entrySet())
            for (DomEventListener listener : entry.getValue())
                domDecoder.addDomEventListener(entry.getKey(), listener);
    return domDecoder.getDocument();
}
/**
 * Same as {@link #decode(DynamicGraph, Node, DOMImplementation, DocumentReceiver, Map)}
 * but with an explicit {@code eventManager} instead of the default one.
 */
public static Document decode(
        DynamicGraph graph, Node docRootNode,
        DOMImplementation domImpl, DocumentReceiver docReceiver,
        Map<String,Set<DomEventListener>> domEventListeners,
        EventManager eventManager) {
    DomDecoder domDecoder = new DomDecoder(graph, docRootNode, domImpl, docReceiver, eventManager);
    if (domEventListeners != null)
        for (Entry<String, Set<DomEventListener>> entry : domEventListeners.entrySet())
            for (DomEventListener listener : entry.getValue())
                domDecoder.addDomEventListener(entry.getKey(), listener);
    return domDecoder.getDocument();
}
/**
 * Decodes the XML document rooted at {@code docRootNode} with an explicit
 * updates context and document receiver, registering the given DOM event
 * listeners on the decoder.
 *
 * @param graph             dynamic graph holding the XML description
 * @param docRootNode       graph node representing the document root
 * @param domImpl           DOM implementation used to create the document
 * @param updatesContext    context in which graph-update handling is run (may be null)
 * @param docReceiver       receiver for the created document (may be null)
 * @param domEventListeners listeners keyed by event type (may be null for none)
 * @return the decoded DOM document
 */
public static Document decode(
        DynamicGraph graph, Node docRootNode,
        DOMImplementation domImpl, RunnableContext updatesContext,
        DocumentReceiver docReceiver,
        Map<String,Set<DomEventListener>> domEventListeners) {
    DomDecoder domDecoder = new DomDecoder(graph, docRootNode, domImpl, updatesContext, docReceiver);
    // Iterate entrySet() rather than keySet()+get(): one map lookup per entry.
    if (domEventListeners != null)
        for (Entry<String, Set<DomEventListener>> entry : domEventListeners.entrySet())
            for (DomEventListener listener : entry.getValue())
                domDecoder.addDomEventListener(entry.getKey(), listener);
    return domDecoder.getDocument();
}
/**
 * Same as {@link #decode(DynamicGraph, Node, DOMImplementation, RunnableContext, DocumentReceiver, Map)}
 * but with an explicit {@code eventManager} instead of the default one.
 */
public static Document decode(
        DynamicGraph graph, Node docRootNode,
        DOMImplementation domImpl, RunnableContext updatesContext,
        DocumentReceiver docReceiver,
        Map<String,Set<DomEventListener>> domEventListeners,
        EventManager eventManager) {
    DomDecoder domDecoder = new DomDecoder(graph, docRootNode, domImpl, updatesContext, docReceiver, eventManager);
    if (domEventListeners != null)
        for (Entry<String, Set<DomEventListener>> entry : domEventListeners.entrySet())
            for (DomEventListener listener : entry.getValue())
                domDecoder.addDomEventListener(entry.getKey(), listener);
    return domDecoder.getDocument();
}
/**
 * {@code decodeOne} overload family: each overload delegates to the matching
 * {@link #decodeAll} overload and returns the first decoded document of the
 * resulting iterator.
 * <p>
 * NOTE(review): when the graph contains no document root, the {@code next()}
 * call fails — presumably with {@code NoSuchElementException}; confirm
 * against the iterator implementation used by {@code decodeAll}.
 */
public static Document decodeOne(DynamicGraph graph, DOMImplementation domImpl) {
    return decodeAll(graph, domImpl).next();
}
public static Document decodeOne(
        DynamicGraph graph, DOMImplementation domImpl,
        EventManager eventManager) {
    return decodeAll(graph, domImpl, eventManager).next();
}
public static Document decodeOne(
        DynamicGraph graph, DOMImplementation domImpl,
        RunnableContext updatesContext) {
    return decodeAll(graph, domImpl, updatesContext).next();
}
public static Document decodeOne(
        DynamicGraph graph, DOMImplementation domImpl,
        RunnableContext updatesContext,
        EventManager eventManager) {
    return decodeAll(graph, domImpl, updatesContext, eventManager).next();
}
public static Document decodeOne(
        DynamicGraph graph, DOMImplementation domImpl,
        DocumentReceiver docReceiver) {
    return decodeAll(graph, domImpl, docReceiver).next();
}
public static Document decodeOne(
        DynamicGraph graph, DOMImplementation domImpl,
        DocumentReceiver docReceiver,
        EventManager eventManager) {
    return decodeAll(graph, domImpl, docReceiver, eventManager).next();
}
public static Document decodeOne(
        DynamicGraph graph, DOMImplementation domImpl,
        RunnableContext updatesContext, DocumentReceiver docReceiver) {
    return decodeAll(graph, domImpl, updatesContext, docReceiver).next();
}
public static Document decodeOne(
        DynamicGraph graph, DOMImplementation domImpl,
        RunnableContext updatesContext, DocumentReceiver docReceiver,
        EventManager eventManager) {
    return decodeAll(graph, domImpl, updatesContext, docReceiver, eventManager).next();
}
public static Document decodeOne(
        DynamicGraph graph, DOMImplementation domImpl,
        Map<String,Set<DomEventListener>> domEventListeners) {
    return decodeAll(graph, domImpl, domEventListeners).next();
}
public static Document decodeOne(
        DynamicGraph graph, DOMImplementation domImpl,
        Map<String,Set<DomEventListener>> domEventListeners,
        EventManager eventManager) {
    return decodeAll(graph, domImpl, domEventListeners, eventManager).next();
}
public static Document decodeOne(
        DynamicGraph graph, DOMImplementation domImpl,
        RunnableContext updatesContext,
        Map<String,Set<DomEventListener>> domEventListeners) {
    return decodeAll(graph, domImpl, updatesContext, domEventListeners).next();
}
public static Document decodeOne(
        DynamicGraph graph, DOMImplementation domImpl,
        RunnableContext updatesContext,
        Map<String,Set<DomEventListener>> domEventListeners,
        EventManager eventManager) {
    return decodeAll(graph, domImpl, updatesContext, domEventListeners, eventManager).next();
}
public static Document decodeOne(
        DynamicGraph graph, DOMImplementation domImpl,
        DocumentReceiver docReceiver,
        Map<String,Set<DomEventListener>> domEventListeners) {
    return decodeAll(graph, domImpl, docReceiver, domEventListeners).next();
}
public static Document decodeOne(
        DynamicGraph graph, DOMImplementation domImpl,
        DocumentReceiver docReceiver,
        Map<String,Set<DomEventListener>> domEventListeners,
        EventManager eventManager) {
    return decodeAll(graph, domImpl, docReceiver, domEventListeners, eventManager).next();
}
public static Document decodeOne(
        DynamicGraph graph, DOMImplementation domImpl,
        RunnableContext updatesContext, DocumentReceiver docReceiver,
        Map<String,Set<DomEventListener>> domEventListeners) {
    return decodeAll(graph, domImpl, updatesContext, docReceiver, domEventListeners).next();
}
public static Document decodeOne(
        DynamicGraph graph, DOMImplementation domImpl,
        RunnableContext updatesContext, DocumentReceiver docReceiver,
        Map<String,Set<DomEventListener>> domEventListeners,
        EventManager eventManager) {
    return decodeAll(graph, domImpl, updatesContext, docReceiver, domEventListeners, eventManager).next();
}
/**
 * {@code decodeAll} delegation overload family: each overload fills in the
 * missing arguments (null updates context, null receiver, null listeners,
 * null event manager) and funnels into the full
 * {@code decodeAll(graph, domImpl, updatesContext, docReceiver, domEventListeners, eventManager)}
 * implementation below. The explicit casts on {@code null} literals
 * ({@code (RunnableContext) null}, etc.) only disambiguate overload
 * resolution.
 */
public static ExtendedIterator<Document> decodeAll(
        final DynamicGraph graph, final DOMImplementation domImpl) {
    return decodeAll(graph, domImpl, (RunnableContext) null);
}
public static ExtendedIterator<Document> decodeAll(
        final DynamicGraph graph, final DOMImplementation domImpl,
        EventManager eventManager) {
    return decodeAll(graph, domImpl, (RunnableContext) null, eventManager);
}
public static ExtendedIterator<Document> decodeAll(
        final DynamicGraph graph, final DOMImplementation domImpl,
        final DocumentReceiver docReceiver) {
    return decodeAll(graph, domImpl, null, docReceiver);
}
public static ExtendedIterator<Document> decodeAll(
        final DynamicGraph graph, final DOMImplementation domImpl,
        final DocumentReceiver docReceiver,
        EventManager eventManager) {
    return decodeAll(graph, domImpl, null, docReceiver, eventManager);
}
public static ExtendedIterator<Document> decodeAll(
        final DynamicGraph graph, final DOMImplementation domImpl,
        final RunnableContext updatesContext) {
    return decodeAll(graph, domImpl, updatesContext, (DocumentReceiver) null);
}
public static ExtendedIterator<Document> decodeAll(
        final DynamicGraph graph, final DOMImplementation domImpl,
        final RunnableContext updatesContext,
        EventManager eventManager) {
    return decodeAll(graph, domImpl, updatesContext, (DocumentReceiver) null, eventManager);
}
public static ExtendedIterator<Document> decodeAll(
        final DynamicGraph graph, final DOMImplementation domImpl,
        final RunnableContext updatesContext, final DocumentReceiver docReceiver) {
    return decodeAll(graph, domImpl, updatesContext, docReceiver, (EventManager) null);
}
public static ExtendedIterator<Document> decodeAll(
        final DynamicGraph graph, final DOMImplementation domImpl,
        final RunnableContext updatesContext, final DocumentReceiver docReceiver,
        EventManager eventManager) {
    return decodeAll(graph, domImpl, updatesContext, docReceiver, null, eventManager);
}
public static ExtendedIterator<Document> decodeAll(
        final DynamicGraph graph, final DOMImplementation domImpl,
        Map<String,Set<DomEventListener>> domEventListeners) {
    return decodeAll(graph, domImpl, (RunnableContext) null, domEventListeners);
}
public static ExtendedIterator<Document> decodeAll(
        final DynamicGraph graph, final DOMImplementation domImpl,
        Map<String,Set<DomEventListener>> domEventListeners,
        EventManager eventManager) {
    return decodeAll(graph, domImpl, (RunnableContext) null, domEventListeners, eventManager);
}
public static ExtendedIterator<Document> decodeAll(
        final DynamicGraph graph, final DOMImplementation domImpl,
        final DocumentReceiver docReceiver,
        Map<String,Set<DomEventListener>> domEventListeners) {
    return decodeAll(graph, domImpl, null, docReceiver, domEventListeners);
}
public static ExtendedIterator<Document> decodeAll(
        final DynamicGraph graph, final DOMImplementation domImpl,
        final DocumentReceiver docReceiver,
        Map<String,Set<DomEventListener>> domEventListeners,
        EventManager eventManager) {
    return decodeAll(graph, domImpl, null, docReceiver, domEventListeners, eventManager);
}
public static ExtendedIterator<Document> decodeAll(
        final DynamicGraph graph, final DOMImplementation domImpl,
        final RunnableContext updatesContext,
        Map<String,Set<DomEventListener>> domEventListeners) {
    return decodeAll(graph, domImpl, updatesContext, null, domEventListeners);
}
public static ExtendedIterator<Document> decodeAll(
        final DynamicGraph graph, final DOMImplementation domImpl,
        final RunnableContext updatesContext,
        Map<String,Set<DomEventListener>> domEventListeners,
        EventManager eventManager) {
    return decodeAll(graph, domImpl, updatesContext, null, domEventListeners, eventManager);
}
public static ExtendedIterator<Document> decodeAll(
        final DynamicGraph graph, final DOMImplementation domImpl,
        final RunnableContext updatesContext, final DocumentReceiver docReceiver,
        final Map<String,Set<DomEventListener>> domEventListeners) {
    return decodeAll(graph, domImpl, updatesContext, docReceiver, domEventListeners, null);
}
/**
 * Finds every document root in {@code graph} and lazily decodes each one.
 * <p>
 * Roots are located by two triple patterns whose results are concatenated:
 * objects of {@code (SWI:GraphRoot, xml:document, ?doc)} triples and
 * subjects of {@code (?doc, rdf:type, xml:Document)} triples. No
 * de-duplication is applied, so a node matching both patterns is decoded
 * twice. Decoding happens on demand, in the final {@code mapWith} step.
 *
 * @param graph             dynamic graph holding the XML description
 * @param domImpl           DOM implementation used to create the documents
 * @param updatesContext    context in which graph-update handling is run (may be null)
 * @param docReceiver       receiver for each created document (may be null)
 * @param domEventListeners listeners keyed by event type (may be null)
 * @param eventManager      event manager to use (may be null)
 * @return a lazy iterator over the decoded documents
 */
public static ExtendedIterator<Document> decodeAll(
        final DynamicGraph graph, final DOMImplementation domImpl,
        final RunnableContext updatesContext, final DocumentReceiver docReceiver,
        final Map<String,Set<DomEventListener>> domEventListeners,
        final EventManager eventManager) {
    return
            graph
            .find(SWI.GraphRoot.asNode(), XML.document.asNode(), Node.ANY)
            .mapWith( new Map1<Triple, Node>() {
                public Node map1(Triple t) {
                    return t.getObject();
                }
            })
            .andThen(
                    graph
                    .find(Node.ANY, RDF.type.asNode(), XML.Document.asNode())
                    .mapWith( new Map1<Triple, Node>() {
                        public Node map1(Triple t) {
                            return t.getSubject();
                        }
                    }))
            // Lazily decode each root node into a full DOM document.
            .mapWith( new Map1<Node, Document>() {
                public Document map1(Node node) {
                    return decode(graph, node, domImpl, updatesContext, docReceiver, domEventListeners, eventManager);
                }
            } );
}
/**
 * Registers a bidirectional mapping between a graph node and a DOM node.
 * One graph node may map to several DOM nodes (a set is kept per graph
 * node); each DOM node maps back to exactly one graph node.
 */
private void addNodeMapping(Node graphNode, org.w3c.dom.Node domNode) {
    Set<org.w3c.dom.Node> mappedDomNodes = graph2domNodeMapping.get(graphNode);
    if (mappedDomNodes == null) {
        // First DOM node for this graph node: create its mapping set.
        mappedDomNodes = new HashSet<org.w3c.dom.Node>();
        graph2domNodeMapping.put(graphNode, mappedDomNodes);
    }
    mappedDomNodes.add(domNode);
    dom2graphNodeMapping.put(domNode, graphNode);
}
/**
 * Registers a "reference" mapping between a graph node and a DOM node.
 * Unlike {@link #addNodeMapping}, the forward link goes into
 * {@code graph2domNodeMappingRef} (single DOM node per graph node) while the
 * back link still goes into the shared {@code dom2graphNodeMapping} map.
 * NOTE(review): such DOM nodes therefore have no entry in
 * {@code graph2domNodeMapping}.
 */
private void addNodeRefMapping(Node graphNode, org.w3c.dom.Node domNode) {
//	System.out.println(this + ": adding mapping ( " + graphNode + " -> " + domNode + " )");
    graph2domNodeMappingRef.put(graphNode, domNode);
    dom2graphNodeMapping.put(domNode, graphNode);
}
/**
 * Removes all mapping and ordering bookkeeping for a single DOM node.
 * The graph-to-DOM set is pruned when it becomes empty.
 */
private void removeNodeMapping(org.w3c.dom.Node domNode) {
    Node graphNode = dom2graphNodeMapping.get(domNode);
    if (graphNode != null) {
        dom2graphNodeMapping.remove(domNode);
        Set<org.w3c.dom.Node> domNodeSet = graph2domNodeMapping.get(graphNode);
        // Null guard: nodes registered via addNodeRefMapping() appear in
        // dom2graphNodeMapping but have no graph2domNodeMapping entry, which
        // previously caused a NullPointerException here.
        if (domNodeSet != null) {
            domNodeSet.remove(domNode);
            if (domNodeSet.isEmpty()) {
                graph2domNodeMapping.remove(graphNode);
            }
        }
    }
    // Drop any per-node ordering/content bookkeeping as well.
    dom2orderedByKeyChildren.remove(domNode);
    dom2childrenOrderProperty.remove(domNode);
    dom2descendingOrder.remove(domNode);
    dom2textContent.remove(domNode);
}
/**
 * Recursively removes the graph/DOM mapping bookkeeping for {@code domNode}
 * and its entire subtree, attributes included.
 */
private void removeSubtreeMapping(org.w3c.dom.Node domNode) {
    removeNodeMapping(domNode);
    NamedNodeMap attributes = domNode.getAttributes();
    if (attributes != null) {
        int attrCount = attributes.getLength();
        for (int index = 0; index < attrCount; index++) {
            removeSubtreeMapping(attributes.item(index));
        }
    }
    NodeList childList = domNode.getChildNodes();
    if (childList != null) {
        int childCount = childList.getLength();
        for (int index = 0; index < childCount; index++) {
            removeSubtreeMapping(childList.item(index));
        }
    }
}
// private DomDecoder(DynamicGraph graph) {
// this.graph = graph;
// this.updatesContext = this;
// }
//
/**
 * Copy constructor used while applying graph updates (see
 * {@code notifyUpdate}): shares the source decoder's graph, root node, DOM
 * implementation, event manager and per-node bookkeeping maps, and installs
 * itself as its own updates context.
 * <p>
 * NOTE(review): {@code graph2domNodeMapping}, {@code graph2domNodeMappingRef},
 * {@code document} and {@code docReceiver} are intentionally NOT copied here —
 * the caller in {@code notifyUpdate} assigns cloned mappings and the document
 * right after construction; confirm before reusing this constructor elsewhere.
 */
private DomDecoder(DomDecoder domDecoder) {
    this.graph = domDecoder.graph;
    this.elementCount = domDecoder.elementCount;
    this.docRootNode = domDecoder.docRootNode;
    this.domImplementation = domDecoder.domImplementation;
    this.dom2childrenKeys = domDecoder.dom2childrenKeys;
    this.dom2childrenOrderProperty = domDecoder.dom2childrenOrderProperty;
    this.dom2textContent = domDecoder.dom2textContent;
    this.dom2descendingOrder = domDecoder.dom2descendingOrder;
    this.dom2graphNodeMapping = domDecoder.dom2graphNodeMapping;
    this.dom2orderedByKeyChildren = domDecoder.dom2orderedByKeyChildren;
    this.eventManager = domDecoder.eventManager;
    this.updatesContext = this;
}
/**
 * Delegating constructor chain: each overload fills in the missing
 * arguments (null document receiver, null updates context,
 * {@code DEFAULT_EVENT_MANAGER}) and funnels into the full constructor
 * below that actually performs the decoding.
 */
private DomDecoder(
        DynamicGraph graph, Node docRootNode,
        DOMImplementation domImpl) {
    this(graph, docRootNode, domImpl, (DocumentReceiver) null);
}
private DomDecoder(
        DynamicGraph graph, Node docRootNode,
        DOMImplementation domImpl,
        EventManager eventManager) {
    this(graph, docRootNode, domImpl, (DocumentReceiver) null, eventManager);
}
private DomDecoder(
        DynamicGraph graph, Node docRootNode,
        DOMImplementation domImpl,
        DocumentReceiver docReceiver) {
    this(graph, docRootNode, domImpl, null, docReceiver);
}
private DomDecoder(
        DynamicGraph graph, Node docRootNode,
        DOMImplementation domImpl,
        DocumentReceiver docReceiver,
        EventManager eventManager) {
    this(graph, docRootNode, domImpl, null, docReceiver, eventManager);
}
private DomDecoder(
        DynamicGraph graph, Node docRootNode,
        DOMImplementation domImpl,
        RunnableContext updatesContext) {
    this(graph, docRootNode, domImpl, updatesContext,DEFAULT_EVENT_MANAGER);
}
private DomDecoder(
        DynamicGraph graph, Node docRootNode,
        DOMImplementation domImpl,
        RunnableContext updatesContext,
        EventManager eventManager) {
    this(graph, docRootNode, domImpl, updatesContext, null, eventManager);
}
private DomDecoder(
        DynamicGraph graph, Node docRootNode,
        DOMImplementation domImpl,
        RunnableContext updatesContext,
        DocumentReceiver docReceiver) {
    this(graph, docRootNode, domImpl, updatesContext, docReceiver, DEFAULT_EVENT_MANAGER);
}
/**
 * Full constructor: stores the collaborators and eagerly decodes the
 * document rooted at {@code docRootNode} via {@code decodeWorker}.
 *
 * @param graph          dynamic graph holding the XML description
 * @param docRootNode    graph node representing the document root
 * @param domImpl        DOM implementation used to create the document
 * @param updatesContext context for running graph-update handling; when
 *                       null, this decoder is used (its {@code run} executes
 *                       synchronously)
 * @param docReceiver    receiver for the created document (may be null)
 * @param eventManager   event manager for DOM event listener registration
 */
private DomDecoder(
        DynamicGraph graph, Node docRootNode,
        DOMImplementation domImpl,
        RunnableContext updatesContext,
        DocumentReceiver docReceiver,
        EventManager eventManager) {
    this.graph = graph;
    this.domImplementation = domImpl;
    // Fall back to this decoder itself as the updates context.
    this.updatesContext = ( updatesContext == null ? this : updatesContext );
    this.docReceiver = docReceiver;
    this.eventManager = eventManager;
    this.docRootNode = docRootNode;
    decodeWorker(graph, docRootNode);
}
/**
 * RunnableContext implementation used when no external updates context is
 * supplied: executes the runnable synchronously on the calling thread.
 */
public void run(Runnable runnable) {
    runnable.run();
}
/** Returns the DOM document produced by this decoder. */
private Document getDocument() {
    return document;
}
/**
 * Inserts {@code node} under {@code parent} at the position dictated by
 * {@code orderKeyNode}, keeping siblings sorted by order key.
 */
private void orderChild(org.w3c.dom.Node node, Element parent, Node orderKeyNode) {
    putChildByKey(parent, node, orderKeyNode);
    NavigableMap<Node, Vector<org.w3c.dom.Node>> siblingsByKey =
            dom2orderedByKeyChildren.get(parent);
    boolean descending = dom2descendingOrder.contains(parent);
    // Ascending order: insert before the first sibling with a strictly
    // greater key. Descending order: before the first with a strictly
    // smaller key. No such sibling means the node goes last.
    Entry<Node, Vector<org.w3c.dom.Node>> anchorEntry =
            descending
                    ? siblingsByKey.lowerEntry(orderKeyNode)
                    : siblingsByKey.higherEntry(orderKeyNode);
    if (anchorEntry == null) {
        parent.appendChild(node);
    } else {
        parent.insertBefore(node, anchorEntry.getValue().firstElement());
    }
}
/**
 * Re-positions an existing child according to a (possibly changed) order
 * key; delegates to {@code orderChild}, which both updates the key index
 * and moves the DOM node.
 */
private void reorderChild(org.w3c.dom.Node node, Element parent, Node orderKeyNode) {
    orderChild(node, parent, orderKeyNode);
}
/**
 * Inserts {@code child} under {@code parent} at the sorted position given
 * by the child's order key. The key is read from the parent's configured
 * ordering property when present, otherwise from {@code xml:orderKey}.
 */
private void insertChildInOrder(org.w3c.dom.Node child, Node childNode, Element parent) {
    Node orderProperty = dom2childrenOrderProperty.get(parent);
    Node orderKeyNode;
    if (orderProperty == null) {
        // No per-parent ordering property configured: use the default key.
        orderKeyNode = GraphUtils.getSingleValueOptProperty(graph, childNode, XML.orderKey.asNode());
    } else {
        orderKeyNode = GraphUtils.getSingleValueOptProperty(graph, childNode, orderProperty);
    }
    orderChild(child, parent, orderKeyNode);
}
/**
 * Detaches {@code node} from {@code parent}, removing its entry from the
 * order-key index first.
 */
private void removeChild(org.w3c.dom.Node node, Element parent) {
    removeChildByKey(parent, node);
    parent.removeChild(node);
}
/**
 * Tells whether this update changes the parent's ordering configuration
 * ({@code xml:childrenOrderedBy} or {@code xml:childrenOrderType}) in either
 * the added or the deleted graph — in which case all children must be
 * re-sorted rather than individually re-positioned.
 */
private boolean mustReorderAllChildrenOf(Element parent, GraphUpdate update) {
    Node parentNode = dom2graphNodeMapping.get(parent);
    Graph added = update.getAddedGraph();
    Graph deleted = update.getDeletedGraph();
    Node orderedByProp = XML.childrenOrderedBy.asNode();
    Node orderTypeProp = XML.childrenOrderType.asNode();
    return added.contains(parentNode, orderedByProp, Node.ANY)
            || added.contains(parentNode, orderTypeProp, Node.ANY)
            || deleted.contains(parentNode, orderedByProp, Node.ANY)
            || deleted.contains(parentNode, orderTypeProp, Node.ANY);
}
public synchronized void notifyUpdate(final Graph sourceGraph, final GraphUpdate update) {
logger.debug("Begin of Notify Update in DOM Decoder");
if (!update.getAddedGraph().isEmpty() || !update.getDeletedGraph().isEmpty()) {
updatesContext.run(
new Runnable() {
@SuppressWarnings("unchecked")
public void run() {
ExtendedIterator<Triple> addEventsIter =
update.getAddedGraph().find(Node.ANY, Node.ANY, Node.ANY);
DomDecoder newDom = new DomDecoder(DomDecoder.this);
newDom.dom2graphNodeMapping =
(Map<org.w3c.dom.Node, Node>) ((HashMap<org.w3c.dom.Node, Node>) dom2graphNodeMapping).clone();
// newDom.graph2domNodeMapping = (Map<Node, Set<org.w3c.dom.Node>>) ((HashMap<Node, Set<org.w3c.dom.Node>>) graph2domNodeMapping).clone();
for (Node key : graph2domNodeMapping.keySet()) {
newDom.graph2domNodeMapping.put(key, (Set<org.w3c.dom.Node>) ((HashSet<org.w3c.dom.Node>) graph2domNodeMapping.get(key)).clone());
}
newDom.document = document;
// newDom.document = (Document) document.cloneNode(true);
while (addEventsIter.hasNext()) {
final Triple newTriple = addEventsIter.next();
// org.w3c.dom.Node xmlSubj = nodeMapping.get(newTriple.getSubject());
//System.out.println("Checking add event " + newTriple);
Set<org.w3c.dom.Node> domSubjs = graph2domNodeMapping.get(newTriple.getSubject());
// if (domSubjs == null)
// logger.warn(this + ": managing add event " + newTriple + ", domSubjs is null");
if (domSubjs != null) {
logger.trace("Managing add event " + newTriple + " for domSubjs " + domSubjs);
Set<org.w3c.dom.Node> domSubjsTemp = new HashSet<org.w3c.dom.Node>();
domSubjsTemp.addAll(domSubjs);
Iterator<org.w3c.dom.Node> domSubjIter = domSubjsTemp.iterator();
// Basic properties: DOM node must be recreated
if (newTriple.getPredicate().equals(RDF.type.asNode())
|| newTriple.getPredicate().equals(XML.nodeName.asNode())) {
//org.w3c.dom.Node parentNode = null;
Node nodeType = newTriple.getObject();
if ( nodeTypeIsElementType(graph,nodeType) ) {
while (domSubjIter.hasNext()) {
org.w3c.dom.Node domSubj = domSubjIter.next();
org.w3c.dom.Node parentNode = domSubj.getParentNode();
if (parentNode != null) {
org.w3c.dom.Node newNode = newDom.decodeNode(newTriple.getSubject());
parentNode.replaceChild(newNode, domSubj);
newDom.removeSubtreeMapping(domSubj);
// newDom.addNodeMapping(newTriple.getSubject(), newNode);
}
}
} else if (nodeIsRootDocument(graph,nodeType,newTriple.getSubject())) {
redecodeDocument(newTriple.getSubject());
return;
}
// Predicate is a DOM Attribute
} else if ( predicateIsAttr(graph, newTriple.getPredicate()) ) {
while (domSubjIter.hasNext()) {
Element element = (Element) domSubjIter.next();
- Attr newAttr = newDom.decodeAttr(newTriple.getPredicate());
+// Attr newAttr = newDom.decodeAttr(newTriple.getPredicate());
// newDom.addNodeMapping(newTriple.getPredicate(), newAttr);
- newAttr.setValue(newTriple.getObject().getLiteralLexicalForm());
- element.setAttributeNodeNS(newAttr);
+ setAttr(element, newTriple.getPredicate(), newTriple.getObject());
+// newAttr.setValue(newTriple.getObject().getLiteralLexicalForm());
+// element.setAttributeNodeNS(newAttr);
}
// Predicate is xml:hasChild
} else if ( newTriple.getPredicate().equals(XML.hasChild.asNode()) ) {
Node nodeType =
graph
.find(newTriple.getSubject(), XML.nodeType.asNode(), Node.ANY)
.next().getObject();
logger.trace("Managing add hasChild (" + newTriple + ") for domSubjs " + domSubjs + " and node type " + nodeType);
if (nodeTypeIsElementType(graph,nodeType)) {
while (domSubjIter.hasNext()) {
Element element = (Element) domSubjIter.next();
org.w3c.dom.Node newChild = newDom.decodeNode(newTriple.getObject());
// newDom.addNodeMapping(newTriple.getObject(), newChild);
// element.appendChild(newChild);
insertChildInOrder(newChild, newTriple.getObject(), element);
}
} else if (nodeIsRootDocument(graph,nodeType,newTriple.getSubject())) {
redecodeDocument(newTriple.getSubject());
return;
}
// Predicate is xml:nodeValue
} else if ( newTriple.getPredicate().equals(XML.nodeValue.asNode()) ) {
logger.trace("Managing add nodeValue (" + newTriple + ") for domSubjs " + domSubjs);
while (domSubjIter.hasNext()) {
org.w3c.dom.Node node = domSubjIter.next();
node.setNodeValue(newTriple.getObject().getLiteralLexicalForm());
}
// Predicate is orderKey
} else if ( newTriple.getPredicate().equals(XML.orderKey.asNode()) ) {
logger.trace("Managing add orderKey (" + newTriple + ") for domSubjs " + domSubjs);
while (domSubjIter.hasNext()) {
org.w3c.dom.Node node = domSubjIter.next();
org.w3c.dom.Node parent = node.getParentNode();
if ( parent != null
&& parent instanceof Element
&& !mustReorderAllChildrenOf((Element) parent, update) ) {
Node childrenOrderProperty = dom2childrenOrderProperty.get((Element) parent);
if ( childrenOrderProperty == null || childrenOrderProperty.equals(XML.orderKey.asNode()) ) {
Node orderKeyNode = newTriple.getObject();
reorderChild(node, (Element) parent, orderKeyNode);
}
}
}
// Predicate xml:childrenOrderType and object xml:Descending
} else if (
newTriple.getPredicate().equals(XML.childrenOrderType.asNode())
&& newTriple.getObject().equals(XML.Descending)
&& !update.getAddedGraph().contains(newTriple.getSubject(), XML.childrenOrderedBy.asNode(), Node.ANY)) {
logger.trace("Managing add childrenOrderType (" + newTriple + ") for domSubjs " + domSubjs);
while (domSubjIter.hasNext()) {
org.w3c.dom.Node node = domSubjIter.next();
if (node instanceof Element) {
NavigableMap<Node, Vector<org.w3c.dom.Node>> orderedByKeyChildren =
dom2orderedByKeyChildren.get(node);
if (!dom2descendingOrder.contains(node)) {
dom2descendingOrder.add((Element) node);
reorder(false, (Element) node, orderedByKeyChildren);
}
}
}
// Predicate xml:childrenOrderedBy
} else if ( newTriple.getPredicate().equals(XML.childrenOrderedBy.asNode()) ) {
logger.trace("Managing add childrenOrderedBy (" + newTriple + ") for domSubjs " + domSubjs);
while (domSubjIter.hasNext()) {
org.w3c.dom.Node node = domSubjIter.next();
if (node instanceof Element)
setupElementChildren(newTriple.getSubject(), (Element) node);
}
// Predicate textContent
} else if ( newTriple.getPredicate().equals(XML.textContent.asNode()) ) {
logger.trace("Managing add textContent (" + newTriple + ") for domSubjs " + domSubjs);
while (domSubjIter.hasNext()) {
org.w3c.dom.Node node = domSubjIter.next();
if (node instanceof Element)
setupElementChildren(newTriple.getSubject(), (Element) node);
}
// Predicate is xml:listenedEventType
} else if ( newTriple.getPredicate().equals(XML.listenedEventType.asNode()) ) {
Node eventTypeNode = newTriple.getObject();
if (eventTypeNode.isLiteral()) {
while (domSubjIter.hasNext()) {
Element element = (Element) domSubjIter.next();
logger.trace("On add, registering eventListener for type " + eventTypeNode.getLiteralLexicalForm() + " in element " + element /*+ " (" + elementNode + ")"*/);
// ((EventTarget) element).addEventListener(eventTypeNode.getLiteralLexicalForm(), DomDecoder2.this, false);
eventManager.addEventListener(
newTriple.getSubject(),
element,
eventTypeNode.getLiteralLexicalForm(),
DomDecoder.this, false);
// Set<Element> elemsForEventType = eventType2elements.get(eventType);
// if (elemsForEventType == null) {
// elemsForEventType = new HashSet<Element>();
// eventType2elements.put(eventType, elemsForEventType);
// ((EventTarget) document).addEventListener(eventTypeNode.getLiteralLexicalForm(), DomDecoder2.this, false);
// }
// elemsForEventType.add(element);
//
// Set<String> eventTypesForElement = element2eventTypes.get(element);
// if (eventTypesForElement == null) {
// eventTypesForElement = new HashSet<String>();
// element2eventTypes.put(element, eventTypesForElement);
// }
// eventTypesForElement.add(eventType);
}
}
}
// Predicate is the childrenOrderedBy for some parent
while (domSubjIter.hasNext()) {
org.w3c.dom.Node node = domSubjIter.next();
org.w3c.dom.Node parent = node.getParentNode();
if ( parent != null
&& parent instanceof Element ) {
Node childrenOrderProperty = dom2childrenOrderProperty.get((Element) parent);
if ( childrenOrderProperty != null
&& childrenOrderProperty.equals(newTriple.getPredicate())
&& !mustReorderAllChildrenOf((Element) parent, update) ) {
logger.trace("Managing add predicate is the childrenOrderedBy for some parent (" + newTriple + ")");
Node orderKeyNode = newTriple.getObject();
reorderChild(node, (Element) parent, orderKeyNode);
}
}
}
}
}
ExtendedIterator<Triple> deleteEventsIter =
update.getDeletedGraph().find(Node.ANY, Node.ANY, Node.ANY);
while (deleteEventsIter.hasNext()) {
Triple oldTriple = deleteEventsIter.next();
//org.w3c.dom.Node xmlSubj = nodeMapping.get(oldTriple.getSubject());
Set<org.w3c.dom.Node> domSubjs = graph2domNodeMapping.get(oldTriple.getSubject());
//System.out.println("Checking for " + oldTriple.getSubject() + " contained in " + sourceGraph);
if (domSubjs != null && graph.contains(oldTriple.getSubject(), RDF.type.asNode(), Node.ANY)) {
//System.out.println("Found " + oldTriple.getSubject() + " contained in " + sourceGraph);
//System.out.println("Managing " + oldTriple.getPredicate() + "/" + oldTriple.getObject());
Iterator<org.w3c.dom.Node> domSubjIter = domSubjs.iterator();
if ( ( oldTriple.getPredicate().equals(XML.nodeName.asNode())
&& !update.getAddedGraph().contains(oldTriple.getSubject(), XML.nodeName.asNode(), Node.ANY) )
|| ( oldTriple.getPredicate().equals(XML.nodeType.asNode())
&& !update.getAddedGraph().contains(oldTriple.getSubject(), XML.nodeType.asNode(), Node.ANY) ) ) {
while (domSubjIter.hasNext()) {
org.w3c.dom.Node domSubj = domSubjIter.next();
switch (domSubj.getNodeType()) {
case org.w3c.dom.Node.ATTRIBUTE_NODE:
Attr oldAttr = (Attr) domSubj;
Element ownerElement = oldAttr.getOwnerElement();
ownerElement.removeAttributeNode(oldAttr);
newDom.removeSubtreeMapping(oldAttr);
break;
case org.w3c.dom.Node.DOCUMENT_NODE:
if ( oldTriple.getSubject().equals(docRootNode) ) {
redecodeDocument();
return;
}
break;
default:
org.w3c.dom.Node parentNode = domSubj.getParentNode();
if (parentNode != null) {
parentNode.removeChild(domSubj);
newDom.removeSubtreeMapping(domSubj);
}
}
}
} else if (
oldTriple.getPredicate().equals(XML.nodeValue.asNode())
&& !graph.contains(oldTriple.getSubject(), XML.nodeValue.asNode(), Node.ANY)) {
while (domSubjIter.hasNext()) {
org.w3c.dom.Node domSubj = domSubjIter.next();
domSubj.setNodeValue("");
}
} else if (
predicateIsAttr(graph, oldTriple.getPredicate())
&& !update.getAddedGraph().contains(oldTriple.getSubject(), oldTriple.getPredicate(), Node.ANY) ) {
while (domSubjIter.hasNext()) {
Element element = (Element) domSubjIter.next();
newDom.removeAttr(element, oldTriple.getPredicate());
}
// Set<org.w3c.dom.Node> domObjsOrig = graph2domNodeMapping.get(oldTriple.getObject());
// if (domObjsOrig != null) {
// Set<org.w3c.dom.Node> domObjs = new HashSet<org.w3c.dom.Node>();
// domObjs.addAll(domObjsOrig);
// while (domSubjIter.hasNext()) {
// Element element = (Element) domSubjIter.next();
// Iterator<org.w3c.dom.Node> domObjsIter = domObjs.iterator();
// while (domObjsIter.hasNext()) {
// try {
// Attr oldAttr = (Attr) domObjsIter.next();
// if ( oldAttr.getNamespaceURI() == null
// ? element.hasAttribute(oldAttr.getName())
// : element.hasAttributeNS(oldAttr.getNamespaceURI(), oldAttr.getLocalName()))
// element.removeAttributeNode(oldAttr);
// newDom.removeSubtreeMapping(oldAttr);
// } catch(DOMException e) {
// if (!e.equals(DOMException.NOT_FOUND_ERR))
// throw e;
// }
// }
// }
// }
} else if ( oldTriple.getPredicate().equals(XML.hasChild.asNode()) ) {
while (domSubjIter.hasNext()) {
org.w3c.dom.Node domSubj = domSubjIter.next();
if ( domSubj.getNodeType() == org.w3c.dom.Node.DOCUMENT_NODE && oldTriple.getSubject().equals(docRootNode) ) {
redecodeDocument();
return;
}
Set<org.w3c.dom.Node> domObjs = graph2domNodeMapping.get(oldTriple.getObject());
if (domObjs != null) {
Element element = (Element) domSubj;
Iterator<org.w3c.dom.Node> domObjsIter = domObjs.iterator();
while (domObjsIter.hasNext()) {
try {
org.w3c.dom.Node domObj = domObjsIter.next();
removeChild(domObj, element);
newDom.removeSubtreeMapping(domObj);
} catch(DOMException e) {
if (!e.equals(DOMException.NOT_FOUND_ERR))
throw e;
}
}
}
}
// Predicate is orderKey
} else if (
oldTriple.getPredicate().equals(XML.orderKey.asNode())
&& !update.getAddedGraph().contains(oldTriple.getSubject(), XML.orderKey.asNode(), Node.ANY) ) {
logger.trace("Managing delete orderKey (" + oldTriple + ") for domSubjs " + domSubjs);
while (domSubjIter.hasNext()) {
org.w3c.dom.Node node = domSubjIter.next();
org.w3c.dom.Node parent = node.getParentNode();
if ( parent != null
&& parent instanceof Element
&& !mustReorderAllChildrenOf((Element) parent, update) ) {
Node childrenOrderProperty = dom2childrenOrderProperty.get((Element) parent);
if ( childrenOrderProperty == null || childrenOrderProperty.equals(XML.orderKey.asNode()) ) {
Node orderKeyNode = oldTriple.getObject();
reorderChild(node, (Element) parent, orderKeyNode);
}
}
}
// Predicate xml:childrenOrderType and object xml:Descending
} else if (
oldTriple.getPredicate().equals(XML.childrenOrderType.asNode())
&& oldTriple.getObject().equals(XML.Descending)
&& !update.getAddedGraph().contains(oldTriple.getSubject(), XML.childrenOrderedBy.asNode(), Node.ANY)
&& !update.getDeletedGraph().contains(oldTriple.getSubject(), XML.childrenOrderedBy.asNode(), Node.ANY) ) {
logger.trace("Managing delete childrenOrderType (" + oldTriple + ") for domSubjs " + domSubjs);
while (domSubjIter.hasNext()) {
org.w3c.dom.Node node = domSubjIter.next();
if (node instanceof Element) {
NavigableMap<Node, Vector<org.w3c.dom.Node>> orderedByKeyChildren =
dom2orderedByKeyChildren.get(node);
if (dom2descendingOrder.contains(node)) {
dom2descendingOrder.remove((Element) node);
reorder(true, (Element) node, orderedByKeyChildren);
}
}
}
// Predicate xml:childrenOrderedBy
} else if (
oldTriple.getPredicate().equals(XML.childrenOrderedBy.asNode())
&& !update.getAddedGraph().contains(oldTriple.getSubject(), XML.childrenOrderedBy.asNode(), Node.ANY) ) {
logger.trace("Managing delete childrenOrderedBy (" + oldTriple + ") for domSubjs " + domSubjs);
while (domSubjIter.hasNext()) {
org.w3c.dom.Node node = domSubjIter.next();
if (node instanceof Element)
setupElementChildren(oldTriple.getSubject(), (Element) node);
}
// Predicate xml:textContent
} else if (
oldTriple.getPredicate().equals(XML.textContent.asNode())
&& !update.getAddedGraph().contains(oldTriple.getSubject(), XML.textContent.asNode(), Node.ANY)
&& !update.getAddedGraph().contains(oldTriple.getSubject(), XML.childrenOrderedBy.asNode(), Node.ANY) ) {
logger.trace("Managing delete textContent (" + oldTriple + ") for domSubjs " + domSubjs);
while (domSubjIter.hasNext()) {
org.w3c.dom.Node node = domSubjIter.next();
if (node instanceof Element)
setupElementChildren(oldTriple.getSubject(), (Element) node);
}
} else if ( oldTriple.getPredicate().equals(XML.listenedEventType.asNode()) ) {
Node eventTypeNode = oldTriple.getObject();
if (eventTypeNode.isLiteral()) {
while (domSubjIter.hasNext()) {
Element element = (Element) domSubjIter.next();
// System.out.println("Registering eventListener for type " + eventTypeNode.getLiteralLexicalForm() + " in element " + element + " (" + elementNode + ")");
// ((EventTarget) element).removeEventListener(eventTypeNode.getLiteralLexicalForm(), DomDecoder2.this, false);
eventManager.removeEventListener(
oldTriple.getSubject(),
element,
eventTypeNode.getLiteralLexicalForm(),
DomDecoder.this, false);
// Set<Element> elemsForEventType = eventType2elements.get(eventType);
// elemsForEventType.remove(element);
// if (elemsForEventType.isEmpty()) {
// eventType2elements.remove(eventType);
// ((EventTarget) document).removeEventListener(eventTypeNode.getLiteralLexicalForm(), DomDecoder2.this, false);
// }
//
// Set<String> eventTypesForElement = element2eventTypes.get(element);
// eventTypesForElement.remove(eventType);
// if (eventTypesForElement.isEmpty()) {
// element2eventTypes.remove(element);
// }
}
}
}
// Predicate is the childrenOrderedBy for some parent
while (domSubjIter.hasNext()) {
org.w3c.dom.Node node = domSubjIter.next();
org.w3c.dom.Node parent = node.getParentNode();
if ( parent != null
&& parent instanceof Element ) {
Node childrenOrderProperty = dom2childrenOrderProperty.get((Element) parent);
if ( childrenOrderProperty != null
&& childrenOrderProperty.equals(oldTriple.getPredicate())
&& !update.getAddedGraph().contains(oldTriple.getSubject(), childrenOrderProperty, Node.ANY)
&& !mustReorderAllChildrenOf((Element) parent, update) ) {
logger.trace("Managing delete predicate that is the childrenOrderedBy for some parent (" + oldTriple + ")");
reorderChild(node, (Element) parent, null);
}
}
}
}
// System.out.println("End of notifyEvents() in " + this);
}
dom2graphNodeMapping = newDom.dom2graphNodeMapping;
graph2domNodeMapping = newDom.graph2domNodeMapping;
// document = newDom.document;
}
});
}
logger.debug("End of Notify Update in DOM Decoder");
}
}
| false | true | public synchronized void notifyUpdate(final Graph sourceGraph, final GraphUpdate update) {
logger.debug("Begin of Notify Update in DOM Decoder");
if (!update.getAddedGraph().isEmpty() || !update.getDeletedGraph().isEmpty()) {
updatesContext.run(
new Runnable() {
@SuppressWarnings("unchecked")
public void run() {
ExtendedIterator<Triple> addEventsIter =
update.getAddedGraph().find(Node.ANY, Node.ANY, Node.ANY);
DomDecoder newDom = new DomDecoder(DomDecoder.this);
newDom.dom2graphNodeMapping =
(Map<org.w3c.dom.Node, Node>) ((HashMap<org.w3c.dom.Node, Node>) dom2graphNodeMapping).clone();
// newDom.graph2domNodeMapping = (Map<Node, Set<org.w3c.dom.Node>>) ((HashMap<Node, Set<org.w3c.dom.Node>>) graph2domNodeMapping).clone();
for (Node key : graph2domNodeMapping.keySet()) {
newDom.graph2domNodeMapping.put(key, (Set<org.w3c.dom.Node>) ((HashSet<org.w3c.dom.Node>) graph2domNodeMapping.get(key)).clone());
}
newDom.document = document;
// newDom.document = (Document) document.cloneNode(true);
while (addEventsIter.hasNext()) {
final Triple newTriple = addEventsIter.next();
// org.w3c.dom.Node xmlSubj = nodeMapping.get(newTriple.getSubject());
//System.out.println("Checking add event " + newTriple);
Set<org.w3c.dom.Node> domSubjs = graph2domNodeMapping.get(newTriple.getSubject());
// if (domSubjs == null)
// logger.warn(this + ": managing add event " + newTriple + ", domSubjs is null");
if (domSubjs != null) {
logger.trace("Managing add event " + newTriple + " for domSubjs " + domSubjs);
Set<org.w3c.dom.Node> domSubjsTemp = new HashSet<org.w3c.dom.Node>();
domSubjsTemp.addAll(domSubjs);
Iterator<org.w3c.dom.Node> domSubjIter = domSubjsTemp.iterator();
// Basic properties: DOM node must be recreated
if (newTriple.getPredicate().equals(RDF.type.asNode())
|| newTriple.getPredicate().equals(XML.nodeName.asNode())) {
//org.w3c.dom.Node parentNode = null;
Node nodeType = newTriple.getObject();
if ( nodeTypeIsElementType(graph,nodeType) ) {
while (domSubjIter.hasNext()) {
org.w3c.dom.Node domSubj = domSubjIter.next();
org.w3c.dom.Node parentNode = domSubj.getParentNode();
if (parentNode != null) {
org.w3c.dom.Node newNode = newDom.decodeNode(newTriple.getSubject());
parentNode.replaceChild(newNode, domSubj);
newDom.removeSubtreeMapping(domSubj);
// newDom.addNodeMapping(newTriple.getSubject(), newNode);
}
}
} else if (nodeIsRootDocument(graph,nodeType,newTriple.getSubject())) {
redecodeDocument(newTriple.getSubject());
return;
}
// Predicate is a DOM Attribute
} else if ( predicateIsAttr(graph, newTriple.getPredicate()) ) {
while (domSubjIter.hasNext()) {
Element element = (Element) domSubjIter.next();
Attr newAttr = newDom.decodeAttr(newTriple.getPredicate());
// newDom.addNodeMapping(newTriple.getPredicate(), newAttr);
newAttr.setValue(newTriple.getObject().getLiteralLexicalForm());
element.setAttributeNodeNS(newAttr);
}
// Predicate is xml:hasChild
} else if ( newTriple.getPredicate().equals(XML.hasChild.asNode()) ) {
Node nodeType =
graph
.find(newTriple.getSubject(), XML.nodeType.asNode(), Node.ANY)
.next().getObject();
logger.trace("Managing add hasChild (" + newTriple + ") for domSubjs " + domSubjs + " and node type " + nodeType);
if (nodeTypeIsElementType(graph,nodeType)) {
while (domSubjIter.hasNext()) {
Element element = (Element) domSubjIter.next();
org.w3c.dom.Node newChild = newDom.decodeNode(newTriple.getObject());
// newDom.addNodeMapping(newTriple.getObject(), newChild);
// element.appendChild(newChild);
insertChildInOrder(newChild, newTriple.getObject(), element);
}
} else if (nodeIsRootDocument(graph,nodeType,newTriple.getSubject())) {
redecodeDocument(newTriple.getSubject());
return;
}
// Predicate is xml:nodeValue
} else if ( newTriple.getPredicate().equals(XML.nodeValue.asNode()) ) {
logger.trace("Managing add nodeValue (" + newTriple + ") for domSubjs " + domSubjs);
while (domSubjIter.hasNext()) {
org.w3c.dom.Node node = domSubjIter.next();
node.setNodeValue(newTriple.getObject().getLiteralLexicalForm());
}
// Predicate is orderKey
} else if ( newTriple.getPredicate().equals(XML.orderKey.asNode()) ) {
logger.trace("Managing add orderKey (" + newTriple + ") for domSubjs " + domSubjs);
while (domSubjIter.hasNext()) {
org.w3c.dom.Node node = domSubjIter.next();
org.w3c.dom.Node parent = node.getParentNode();
if ( parent != null
&& parent instanceof Element
&& !mustReorderAllChildrenOf((Element) parent, update) ) {
Node childrenOrderProperty = dom2childrenOrderProperty.get((Element) parent);
if ( childrenOrderProperty == null || childrenOrderProperty.equals(XML.orderKey.asNode()) ) {
Node orderKeyNode = newTriple.getObject();
reorderChild(node, (Element) parent, orderKeyNode);
}
}
}
// Predicate xml:childrenOrderType and object xml:Descending
} else if (
newTriple.getPredicate().equals(XML.childrenOrderType.asNode())
&& newTriple.getObject().equals(XML.Descending)
&& !update.getAddedGraph().contains(newTriple.getSubject(), XML.childrenOrderedBy.asNode(), Node.ANY)) {
logger.trace("Managing add childrenOrderType (" + newTriple + ") for domSubjs " + domSubjs);
while (domSubjIter.hasNext()) {
org.w3c.dom.Node node = domSubjIter.next();
if (node instanceof Element) {
NavigableMap<Node, Vector<org.w3c.dom.Node>> orderedByKeyChildren =
dom2orderedByKeyChildren.get(node);
if (!dom2descendingOrder.contains(node)) {
dom2descendingOrder.add((Element) node);
reorder(false, (Element) node, orderedByKeyChildren);
}
}
}
// Predicate xml:childrenOrderedBy
} else if ( newTriple.getPredicate().equals(XML.childrenOrderedBy.asNode()) ) {
logger.trace("Managing add childrenOrderedBy (" + newTriple + ") for domSubjs " + domSubjs);
while (domSubjIter.hasNext()) {
org.w3c.dom.Node node = domSubjIter.next();
if (node instanceof Element)
setupElementChildren(newTriple.getSubject(), (Element) node);
}
// Predicate textContent
} else if ( newTriple.getPredicate().equals(XML.textContent.asNode()) ) {
logger.trace("Managing add textContent (" + newTriple + ") for domSubjs " + domSubjs);
while (domSubjIter.hasNext()) {
org.w3c.dom.Node node = domSubjIter.next();
if (node instanceof Element)
setupElementChildren(newTriple.getSubject(), (Element) node);
}
// Predicate is xml:listenedEventType
} else if ( newTriple.getPredicate().equals(XML.listenedEventType.asNode()) ) {
Node eventTypeNode = newTriple.getObject();
if (eventTypeNode.isLiteral()) {
while (domSubjIter.hasNext()) {
Element element = (Element) domSubjIter.next();
logger.trace("On add, registering eventListener for type " + eventTypeNode.getLiteralLexicalForm() + " in element " + element /*+ " (" + elementNode + ")"*/);
// ((EventTarget) element).addEventListener(eventTypeNode.getLiteralLexicalForm(), DomDecoder2.this, false);
eventManager.addEventListener(
newTriple.getSubject(),
element,
eventTypeNode.getLiteralLexicalForm(),
DomDecoder.this, false);
// Set<Element> elemsForEventType = eventType2elements.get(eventType);
// if (elemsForEventType == null) {
// elemsForEventType = new HashSet<Element>();
// eventType2elements.put(eventType, elemsForEventType);
// ((EventTarget) document).addEventListener(eventTypeNode.getLiteralLexicalForm(), DomDecoder2.this, false);
// }
// elemsForEventType.add(element);
//
// Set<String> eventTypesForElement = element2eventTypes.get(element);
// if (eventTypesForElement == null) {
// eventTypesForElement = new HashSet<String>();
// element2eventTypes.put(element, eventTypesForElement);
// }
// eventTypesForElement.add(eventType);
}
}
}
// Predicate is the childrenOrderedBy for some parent
while (domSubjIter.hasNext()) {
org.w3c.dom.Node node = domSubjIter.next();
org.w3c.dom.Node parent = node.getParentNode();
if ( parent != null
&& parent instanceof Element ) {
Node childrenOrderProperty = dom2childrenOrderProperty.get((Element) parent);
if ( childrenOrderProperty != null
&& childrenOrderProperty.equals(newTriple.getPredicate())
&& !mustReorderAllChildrenOf((Element) parent, update) ) {
logger.trace("Managing add predicate is the childrenOrderedBy for some parent (" + newTriple + ")");
Node orderKeyNode = newTriple.getObject();
reorderChild(node, (Element) parent, orderKeyNode);
}
}
}
}
}
ExtendedIterator<Triple> deleteEventsIter =
update.getDeletedGraph().find(Node.ANY, Node.ANY, Node.ANY);
while (deleteEventsIter.hasNext()) {
Triple oldTriple = deleteEventsIter.next();
//org.w3c.dom.Node xmlSubj = nodeMapping.get(oldTriple.getSubject());
Set<org.w3c.dom.Node> domSubjs = graph2domNodeMapping.get(oldTriple.getSubject());
//System.out.println("Checking for " + oldTriple.getSubject() + " contained in " + sourceGraph);
if (domSubjs != null && graph.contains(oldTriple.getSubject(), RDF.type.asNode(), Node.ANY)) {
//System.out.println("Found " + oldTriple.getSubject() + " contained in " + sourceGraph);
//System.out.println("Managing " + oldTriple.getPredicate() + "/" + oldTriple.getObject());
Iterator<org.w3c.dom.Node> domSubjIter = domSubjs.iterator();
if ( ( oldTriple.getPredicate().equals(XML.nodeName.asNode())
&& !update.getAddedGraph().contains(oldTriple.getSubject(), XML.nodeName.asNode(), Node.ANY) )
|| ( oldTriple.getPredicate().equals(XML.nodeType.asNode())
&& !update.getAddedGraph().contains(oldTriple.getSubject(), XML.nodeType.asNode(), Node.ANY) ) ) {
while (domSubjIter.hasNext()) {
org.w3c.dom.Node domSubj = domSubjIter.next();
switch (domSubj.getNodeType()) {
case org.w3c.dom.Node.ATTRIBUTE_NODE:
Attr oldAttr = (Attr) domSubj;
Element ownerElement = oldAttr.getOwnerElement();
ownerElement.removeAttributeNode(oldAttr);
newDom.removeSubtreeMapping(oldAttr);
break;
case org.w3c.dom.Node.DOCUMENT_NODE:
if ( oldTriple.getSubject().equals(docRootNode) ) {
redecodeDocument();
return;
}
break;
default:
org.w3c.dom.Node parentNode = domSubj.getParentNode();
if (parentNode != null) {
parentNode.removeChild(domSubj);
newDom.removeSubtreeMapping(domSubj);
}
}
}
} else if (
oldTriple.getPredicate().equals(XML.nodeValue.asNode())
&& !graph.contains(oldTriple.getSubject(), XML.nodeValue.asNode(), Node.ANY)) {
while (domSubjIter.hasNext()) {
org.w3c.dom.Node domSubj = domSubjIter.next();
domSubj.setNodeValue("");
}
} else if (
predicateIsAttr(graph, oldTriple.getPredicate())
&& !update.getAddedGraph().contains(oldTriple.getSubject(), oldTriple.getPredicate(), Node.ANY) ) {
while (domSubjIter.hasNext()) {
Element element = (Element) domSubjIter.next();
newDom.removeAttr(element, oldTriple.getPredicate());
}
// Set<org.w3c.dom.Node> domObjsOrig = graph2domNodeMapping.get(oldTriple.getObject());
// if (domObjsOrig != null) {
// Set<org.w3c.dom.Node> domObjs = new HashSet<org.w3c.dom.Node>();
// domObjs.addAll(domObjsOrig);
// while (domSubjIter.hasNext()) {
// Element element = (Element) domSubjIter.next();
// Iterator<org.w3c.dom.Node> domObjsIter = domObjs.iterator();
// while (domObjsIter.hasNext()) {
// try {
// Attr oldAttr = (Attr) domObjsIter.next();
// if ( oldAttr.getNamespaceURI() == null
// ? element.hasAttribute(oldAttr.getName())
// : element.hasAttributeNS(oldAttr.getNamespaceURI(), oldAttr.getLocalName()))
// element.removeAttributeNode(oldAttr);
// newDom.removeSubtreeMapping(oldAttr);
// } catch(DOMException e) {
// if (!e.equals(DOMException.NOT_FOUND_ERR))
// throw e;
// }
// }
// }
// }
} else if ( oldTriple.getPredicate().equals(XML.hasChild.asNode()) ) {
while (domSubjIter.hasNext()) {
org.w3c.dom.Node domSubj = domSubjIter.next();
if ( domSubj.getNodeType() == org.w3c.dom.Node.DOCUMENT_NODE && oldTriple.getSubject().equals(docRootNode) ) {
redecodeDocument();
return;
}
Set<org.w3c.dom.Node> domObjs = graph2domNodeMapping.get(oldTriple.getObject());
if (domObjs != null) {
Element element = (Element) domSubj;
Iterator<org.w3c.dom.Node> domObjsIter = domObjs.iterator();
while (domObjsIter.hasNext()) {
try {
org.w3c.dom.Node domObj = domObjsIter.next();
removeChild(domObj, element);
newDom.removeSubtreeMapping(domObj);
} catch(DOMException e) {
if (!e.equals(DOMException.NOT_FOUND_ERR))
throw e;
}
}
}
}
// Predicate is orderKey
} else if (
oldTriple.getPredicate().equals(XML.orderKey.asNode())
&& !update.getAddedGraph().contains(oldTriple.getSubject(), XML.orderKey.asNode(), Node.ANY) ) {
logger.trace("Managing delete orderKey (" + oldTriple + ") for domSubjs " + domSubjs);
while (domSubjIter.hasNext()) {
org.w3c.dom.Node node = domSubjIter.next();
org.w3c.dom.Node parent = node.getParentNode();
if ( parent != null
&& parent instanceof Element
&& !mustReorderAllChildrenOf((Element) parent, update) ) {
Node childrenOrderProperty = dom2childrenOrderProperty.get((Element) parent);
if ( childrenOrderProperty == null || childrenOrderProperty.equals(XML.orderKey.asNode()) ) {
Node orderKeyNode = oldTriple.getObject();
reorderChild(node, (Element) parent, orderKeyNode);
}
}
}
// Predicate xml:childrenOrderType and object xml:Descending
} else if (
oldTriple.getPredicate().equals(XML.childrenOrderType.asNode())
&& oldTriple.getObject().equals(XML.Descending)
&& !update.getAddedGraph().contains(oldTriple.getSubject(), XML.childrenOrderedBy.asNode(), Node.ANY)
&& !update.getDeletedGraph().contains(oldTriple.getSubject(), XML.childrenOrderedBy.asNode(), Node.ANY) ) {
logger.trace("Managing delete childrenOrderType (" + oldTriple + ") for domSubjs " + domSubjs);
while (domSubjIter.hasNext()) {
org.w3c.dom.Node node = domSubjIter.next();
if (node instanceof Element) {
NavigableMap<Node, Vector<org.w3c.dom.Node>> orderedByKeyChildren =
dom2orderedByKeyChildren.get(node);
if (dom2descendingOrder.contains(node)) {
dom2descendingOrder.remove((Element) node);
reorder(true, (Element) node, orderedByKeyChildren);
}
}
}
// Predicate xml:childrenOrderedBy
} else if (
oldTriple.getPredicate().equals(XML.childrenOrderedBy.asNode())
&& !update.getAddedGraph().contains(oldTriple.getSubject(), XML.childrenOrderedBy.asNode(), Node.ANY) ) {
logger.trace("Managing delete childrenOrderedBy (" + oldTriple + ") for domSubjs " + domSubjs);
while (domSubjIter.hasNext()) {
org.w3c.dom.Node node = domSubjIter.next();
if (node instanceof Element)
setupElementChildren(oldTriple.getSubject(), (Element) node);
}
// Predicate xml:textContent
} else if (
oldTriple.getPredicate().equals(XML.textContent.asNode())
&& !update.getAddedGraph().contains(oldTriple.getSubject(), XML.textContent.asNode(), Node.ANY)
&& !update.getAddedGraph().contains(oldTriple.getSubject(), XML.childrenOrderedBy.asNode(), Node.ANY) ) {
logger.trace("Managing delete textContent (" + oldTriple + ") for domSubjs " + domSubjs);
while (domSubjIter.hasNext()) {
org.w3c.dom.Node node = domSubjIter.next();
if (node instanceof Element)
setupElementChildren(oldTriple.getSubject(), (Element) node);
}
} else if ( oldTriple.getPredicate().equals(XML.listenedEventType.asNode()) ) {
Node eventTypeNode = oldTriple.getObject();
if (eventTypeNode.isLiteral()) {
while (domSubjIter.hasNext()) {
Element element = (Element) domSubjIter.next();
// System.out.println("Registering eventListener for type " + eventTypeNode.getLiteralLexicalForm() + " in element " + element + " (" + elementNode + ")");
// ((EventTarget) element).removeEventListener(eventTypeNode.getLiteralLexicalForm(), DomDecoder2.this, false);
eventManager.removeEventListener(
oldTriple.getSubject(),
element,
eventTypeNode.getLiteralLexicalForm(),
DomDecoder.this, false);
// Set<Element> elemsForEventType = eventType2elements.get(eventType);
// elemsForEventType.remove(element);
// if (elemsForEventType.isEmpty()) {
// eventType2elements.remove(eventType);
// ((EventTarget) document).removeEventListener(eventTypeNode.getLiteralLexicalForm(), DomDecoder2.this, false);
// }
//
// Set<String> eventTypesForElement = element2eventTypes.get(element);
// eventTypesForElement.remove(eventType);
// if (eventTypesForElement.isEmpty()) {
// element2eventTypes.remove(element);
// }
}
}
}
// Predicate is the childrenOrderedBy for some parent
while (domSubjIter.hasNext()) {
org.w3c.dom.Node node = domSubjIter.next();
org.w3c.dom.Node parent = node.getParentNode();
if ( parent != null
&& parent instanceof Element ) {
Node childrenOrderProperty = dom2childrenOrderProperty.get((Element) parent);
if ( childrenOrderProperty != null
&& childrenOrderProperty.equals(oldTriple.getPredicate())
&& !update.getAddedGraph().contains(oldTriple.getSubject(), childrenOrderProperty, Node.ANY)
&& !mustReorderAllChildrenOf((Element) parent, update) ) {
logger.trace("Managing delete predicate that is the childrenOrderedBy for some parent (" + oldTriple + ")");
reorderChild(node, (Element) parent, null);
}
}
}
}
// System.out.println("End of notifyEvents() in " + this);
}
dom2graphNodeMapping = newDom.dom2graphNodeMapping;
graph2domNodeMapping = newDom.graph2domNodeMapping;
// document = newDom.document;
}
});
}
logger.debug("End of Notify Update in DOM Decoder");
}
| public synchronized void notifyUpdate(final Graph sourceGraph, final GraphUpdate update) {
logger.debug("Begin of Notify Update in DOM Decoder");
if (!update.getAddedGraph().isEmpty() || !update.getDeletedGraph().isEmpty()) {
updatesContext.run(
new Runnable() {
@SuppressWarnings("unchecked")
public void run() {
ExtendedIterator<Triple> addEventsIter =
update.getAddedGraph().find(Node.ANY, Node.ANY, Node.ANY);
DomDecoder newDom = new DomDecoder(DomDecoder.this);
newDom.dom2graphNodeMapping =
(Map<org.w3c.dom.Node, Node>) ((HashMap<org.w3c.dom.Node, Node>) dom2graphNodeMapping).clone();
// newDom.graph2domNodeMapping = (Map<Node, Set<org.w3c.dom.Node>>) ((HashMap<Node, Set<org.w3c.dom.Node>>) graph2domNodeMapping).clone();
for (Node key : graph2domNodeMapping.keySet()) {
newDom.graph2domNodeMapping.put(key, (Set<org.w3c.dom.Node>) ((HashSet<org.w3c.dom.Node>) graph2domNodeMapping.get(key)).clone());
}
newDom.document = document;
// newDom.document = (Document) document.cloneNode(true);
while (addEventsIter.hasNext()) {
final Triple newTriple = addEventsIter.next();
// org.w3c.dom.Node xmlSubj = nodeMapping.get(newTriple.getSubject());
//System.out.println("Checking add event " + newTriple);
Set<org.w3c.dom.Node> domSubjs = graph2domNodeMapping.get(newTriple.getSubject());
// if (domSubjs == null)
// logger.warn(this + ": managing add event " + newTriple + ", domSubjs is null");
if (domSubjs != null) {
logger.trace("Managing add event " + newTriple + " for domSubjs " + domSubjs);
Set<org.w3c.dom.Node> domSubjsTemp = new HashSet<org.w3c.dom.Node>();
domSubjsTemp.addAll(domSubjs);
Iterator<org.w3c.dom.Node> domSubjIter = domSubjsTemp.iterator();
// Basic properties: DOM node must be recreated
if (newTriple.getPredicate().equals(RDF.type.asNode())
|| newTriple.getPredicate().equals(XML.nodeName.asNode())) {
//org.w3c.dom.Node parentNode = null;
Node nodeType = newTriple.getObject();
if ( nodeTypeIsElementType(graph,nodeType) ) {
while (domSubjIter.hasNext()) {
org.w3c.dom.Node domSubj = domSubjIter.next();
org.w3c.dom.Node parentNode = domSubj.getParentNode();
if (parentNode != null) {
org.w3c.dom.Node newNode = newDom.decodeNode(newTriple.getSubject());
parentNode.replaceChild(newNode, domSubj);
newDom.removeSubtreeMapping(domSubj);
// newDom.addNodeMapping(newTriple.getSubject(), newNode);
}
}
} else if (nodeIsRootDocument(graph,nodeType,newTriple.getSubject())) {
redecodeDocument(newTriple.getSubject());
return;
}
// Predicate is a DOM Attribute
} else if ( predicateIsAttr(graph, newTriple.getPredicate()) ) {
while (domSubjIter.hasNext()) {
Element element = (Element) domSubjIter.next();
// Attr newAttr = newDom.decodeAttr(newTriple.getPredicate());
// newDom.addNodeMapping(newTriple.getPredicate(), newAttr);
setAttr(element, newTriple.getPredicate(), newTriple.getObject());
// newAttr.setValue(newTriple.getObject().getLiteralLexicalForm());
// element.setAttributeNodeNS(newAttr);
}
// Predicate is xml:hasChild
} else if ( newTriple.getPredicate().equals(XML.hasChild.asNode()) ) {
Node nodeType =
graph
.find(newTriple.getSubject(), XML.nodeType.asNode(), Node.ANY)
.next().getObject();
logger.trace("Managing add hasChild (" + newTriple + ") for domSubjs " + domSubjs + " and node type " + nodeType);
if (nodeTypeIsElementType(graph,nodeType)) {
while (domSubjIter.hasNext()) {
Element element = (Element) domSubjIter.next();
org.w3c.dom.Node newChild = newDom.decodeNode(newTriple.getObject());
// newDom.addNodeMapping(newTriple.getObject(), newChild);
// element.appendChild(newChild);
insertChildInOrder(newChild, newTriple.getObject(), element);
}
} else if (nodeIsRootDocument(graph,nodeType,newTriple.getSubject())) {
redecodeDocument(newTriple.getSubject());
return;
}
// Predicate is xml:nodeValue
} else if ( newTriple.getPredicate().equals(XML.nodeValue.asNode()) ) {
logger.trace("Managing add nodeValue (" + newTriple + ") for domSubjs " + domSubjs);
while (domSubjIter.hasNext()) {
org.w3c.dom.Node node = domSubjIter.next();
node.setNodeValue(newTriple.getObject().getLiteralLexicalForm());
}
// Predicate is orderKey
} else if ( newTriple.getPredicate().equals(XML.orderKey.asNode()) ) {
logger.trace("Managing add orderKey (" + newTriple + ") for domSubjs " + domSubjs);
while (domSubjIter.hasNext()) {
org.w3c.dom.Node node = domSubjIter.next();
org.w3c.dom.Node parent = node.getParentNode();
if ( parent != null
&& parent instanceof Element
&& !mustReorderAllChildrenOf((Element) parent, update) ) {
Node childrenOrderProperty = dom2childrenOrderProperty.get((Element) parent);
if ( childrenOrderProperty == null || childrenOrderProperty.equals(XML.orderKey.asNode()) ) {
Node orderKeyNode = newTriple.getObject();
reorderChild(node, (Element) parent, orderKeyNode);
}
}
}
// Predicate xml:childrenOrderType and object xml:Descending
} else if (
newTriple.getPredicate().equals(XML.childrenOrderType.asNode())
&& newTriple.getObject().equals(XML.Descending)
&& !update.getAddedGraph().contains(newTriple.getSubject(), XML.childrenOrderedBy.asNode(), Node.ANY)) {
logger.trace("Managing add childrenOrderType (" + newTriple + ") for domSubjs " + domSubjs);
while (domSubjIter.hasNext()) {
org.w3c.dom.Node node = domSubjIter.next();
if (node instanceof Element) {
NavigableMap<Node, Vector<org.w3c.dom.Node>> orderedByKeyChildren =
dom2orderedByKeyChildren.get(node);
if (!dom2descendingOrder.contains(node)) {
dom2descendingOrder.add((Element) node);
reorder(false, (Element) node, orderedByKeyChildren);
}
}
}
// Predicate xml:childrenOrderedBy
} else if ( newTriple.getPredicate().equals(XML.childrenOrderedBy.asNode()) ) {
logger.trace("Managing add childrenOrderedBy (" + newTriple + ") for domSubjs " + domSubjs);
while (domSubjIter.hasNext()) {
org.w3c.dom.Node node = domSubjIter.next();
if (node instanceof Element)
setupElementChildren(newTriple.getSubject(), (Element) node);
}
// Predicate textContent
} else if ( newTriple.getPredicate().equals(XML.textContent.asNode()) ) {
logger.trace("Managing add textContent (" + newTriple + ") for domSubjs " + domSubjs);
while (domSubjIter.hasNext()) {
org.w3c.dom.Node node = domSubjIter.next();
if (node instanceof Element)
setupElementChildren(newTriple.getSubject(), (Element) node);
}
// Predicate is xml:listenedEventType
} else if ( newTriple.getPredicate().equals(XML.listenedEventType.asNode()) ) {
Node eventTypeNode = newTriple.getObject();
if (eventTypeNode.isLiteral()) {
while (domSubjIter.hasNext()) {
Element element = (Element) domSubjIter.next();
logger.trace("On add, registering eventListener for type " + eventTypeNode.getLiteralLexicalForm() + " in element " + element /*+ " (" + elementNode + ")"*/);
// ((EventTarget) element).addEventListener(eventTypeNode.getLiteralLexicalForm(), DomDecoder2.this, false);
eventManager.addEventListener(
newTriple.getSubject(),
element,
eventTypeNode.getLiteralLexicalForm(),
DomDecoder.this, false);
// Set<Element> elemsForEventType = eventType2elements.get(eventType);
// if (elemsForEventType == null) {
// elemsForEventType = new HashSet<Element>();
// eventType2elements.put(eventType, elemsForEventType);
// ((EventTarget) document).addEventListener(eventTypeNode.getLiteralLexicalForm(), DomDecoder2.this, false);
// }
// elemsForEventType.add(element);
//
// Set<String> eventTypesForElement = element2eventTypes.get(element);
// if (eventTypesForElement == null) {
// eventTypesForElement = new HashSet<String>();
// element2eventTypes.put(element, eventTypesForElement);
// }
// eventTypesForElement.add(eventType);
}
}
}
// Predicate is the childrenOrderedBy for some parent
while (domSubjIter.hasNext()) {
org.w3c.dom.Node node = domSubjIter.next();
org.w3c.dom.Node parent = node.getParentNode();
if ( parent != null
&& parent instanceof Element ) {
Node childrenOrderProperty = dom2childrenOrderProperty.get((Element) parent);
if ( childrenOrderProperty != null
&& childrenOrderProperty.equals(newTriple.getPredicate())
&& !mustReorderAllChildrenOf((Element) parent, update) ) {
logger.trace("Managing add predicate is the childrenOrderedBy for some parent (" + newTriple + ")");
Node orderKeyNode = newTriple.getObject();
reorderChild(node, (Element) parent, orderKeyNode);
}
}
}
}
}
ExtendedIterator<Triple> deleteEventsIter =
update.getDeletedGraph().find(Node.ANY, Node.ANY, Node.ANY);
while (deleteEventsIter.hasNext()) {
Triple oldTriple = deleteEventsIter.next();
//org.w3c.dom.Node xmlSubj = nodeMapping.get(oldTriple.getSubject());
Set<org.w3c.dom.Node> domSubjs = graph2domNodeMapping.get(oldTriple.getSubject());
//System.out.println("Checking for " + oldTriple.getSubject() + " contained in " + sourceGraph);
if (domSubjs != null && graph.contains(oldTriple.getSubject(), RDF.type.asNode(), Node.ANY)) {
//System.out.println("Found " + oldTriple.getSubject() + " contained in " + sourceGraph);
//System.out.println("Managing " + oldTriple.getPredicate() + "/" + oldTriple.getObject());
Iterator<org.w3c.dom.Node> domSubjIter = domSubjs.iterator();
if ( ( oldTriple.getPredicate().equals(XML.nodeName.asNode())
&& !update.getAddedGraph().contains(oldTriple.getSubject(), XML.nodeName.asNode(), Node.ANY) )
|| ( oldTriple.getPredicate().equals(XML.nodeType.asNode())
&& !update.getAddedGraph().contains(oldTriple.getSubject(), XML.nodeType.asNode(), Node.ANY) ) ) {
while (domSubjIter.hasNext()) {
org.w3c.dom.Node domSubj = domSubjIter.next();
switch (domSubj.getNodeType()) {
case org.w3c.dom.Node.ATTRIBUTE_NODE:
Attr oldAttr = (Attr) domSubj;
Element ownerElement = oldAttr.getOwnerElement();
ownerElement.removeAttributeNode(oldAttr);
newDom.removeSubtreeMapping(oldAttr);
break;
case org.w3c.dom.Node.DOCUMENT_NODE:
if ( oldTriple.getSubject().equals(docRootNode) ) {
redecodeDocument();
return;
}
break;
default:
org.w3c.dom.Node parentNode = domSubj.getParentNode();
if (parentNode != null) {
parentNode.removeChild(domSubj);
newDom.removeSubtreeMapping(domSubj);
}
}
}
} else if (
oldTriple.getPredicate().equals(XML.nodeValue.asNode())
&& !graph.contains(oldTriple.getSubject(), XML.nodeValue.asNode(), Node.ANY)) {
while (domSubjIter.hasNext()) {
org.w3c.dom.Node domSubj = domSubjIter.next();
domSubj.setNodeValue("");
}
} else if (
predicateIsAttr(graph, oldTriple.getPredicate())
&& !update.getAddedGraph().contains(oldTriple.getSubject(), oldTriple.getPredicate(), Node.ANY) ) {
while (domSubjIter.hasNext()) {
Element element = (Element) domSubjIter.next();
newDom.removeAttr(element, oldTriple.getPredicate());
}
// Set<org.w3c.dom.Node> domObjsOrig = graph2domNodeMapping.get(oldTriple.getObject());
// if (domObjsOrig != null) {
// Set<org.w3c.dom.Node> domObjs = new HashSet<org.w3c.dom.Node>();
// domObjs.addAll(domObjsOrig);
// while (domSubjIter.hasNext()) {
// Element element = (Element) domSubjIter.next();
// Iterator<org.w3c.dom.Node> domObjsIter = domObjs.iterator();
// while (domObjsIter.hasNext()) {
// try {
// Attr oldAttr = (Attr) domObjsIter.next();
// if ( oldAttr.getNamespaceURI() == null
// ? element.hasAttribute(oldAttr.getName())
// : element.hasAttributeNS(oldAttr.getNamespaceURI(), oldAttr.getLocalName()))
// element.removeAttributeNode(oldAttr);
// newDom.removeSubtreeMapping(oldAttr);
// } catch(DOMException e) {
// if (!e.equals(DOMException.NOT_FOUND_ERR))
// throw e;
// }
// }
// }
// }
} else if ( oldTriple.getPredicate().equals(XML.hasChild.asNode()) ) {
while (domSubjIter.hasNext()) {
org.w3c.dom.Node domSubj = domSubjIter.next();
if ( domSubj.getNodeType() == org.w3c.dom.Node.DOCUMENT_NODE && oldTriple.getSubject().equals(docRootNode) ) {
redecodeDocument();
return;
}
Set<org.w3c.dom.Node> domObjs = graph2domNodeMapping.get(oldTriple.getObject());
if (domObjs != null) {
Element element = (Element) domSubj;
Iterator<org.w3c.dom.Node> domObjsIter = domObjs.iterator();
while (domObjsIter.hasNext()) {
try {
org.w3c.dom.Node domObj = domObjsIter.next();
removeChild(domObj, element);
newDom.removeSubtreeMapping(domObj);
} catch(DOMException e) {
if (!e.equals(DOMException.NOT_FOUND_ERR))
throw e;
}
}
}
}
// Predicate is orderKey
} else if (
oldTriple.getPredicate().equals(XML.orderKey.asNode())
&& !update.getAddedGraph().contains(oldTriple.getSubject(), XML.orderKey.asNode(), Node.ANY) ) {
logger.trace("Managing delete orderKey (" + oldTriple + ") for domSubjs " + domSubjs);
while (domSubjIter.hasNext()) {
org.w3c.dom.Node node = domSubjIter.next();
org.w3c.dom.Node parent = node.getParentNode();
if ( parent != null
&& parent instanceof Element
&& !mustReorderAllChildrenOf((Element) parent, update) ) {
Node childrenOrderProperty = dom2childrenOrderProperty.get((Element) parent);
if ( childrenOrderProperty == null || childrenOrderProperty.equals(XML.orderKey.asNode()) ) {
Node orderKeyNode = oldTriple.getObject();
reorderChild(node, (Element) parent, orderKeyNode);
}
}
}
// Predicate xml:childrenOrderType and object xml:Descending
} else if (
oldTriple.getPredicate().equals(XML.childrenOrderType.asNode())
&& oldTriple.getObject().equals(XML.Descending)
&& !update.getAddedGraph().contains(oldTriple.getSubject(), XML.childrenOrderedBy.asNode(), Node.ANY)
&& !update.getDeletedGraph().contains(oldTriple.getSubject(), XML.childrenOrderedBy.asNode(), Node.ANY) ) {
logger.trace("Managing delete childrenOrderType (" + oldTriple + ") for domSubjs " + domSubjs);
while (domSubjIter.hasNext()) {
org.w3c.dom.Node node = domSubjIter.next();
if (node instanceof Element) {
NavigableMap<Node, Vector<org.w3c.dom.Node>> orderedByKeyChildren =
dom2orderedByKeyChildren.get(node);
if (dom2descendingOrder.contains(node)) {
dom2descendingOrder.remove((Element) node);
reorder(true, (Element) node, orderedByKeyChildren);
}
}
}
// Predicate xml:childrenOrderedBy
} else if (
oldTriple.getPredicate().equals(XML.childrenOrderedBy.asNode())
&& !update.getAddedGraph().contains(oldTriple.getSubject(), XML.childrenOrderedBy.asNode(), Node.ANY) ) {
logger.trace("Managing delete childrenOrderedBy (" + oldTriple + ") for domSubjs " + domSubjs);
while (domSubjIter.hasNext()) {
org.w3c.dom.Node node = domSubjIter.next();
if (node instanceof Element)
setupElementChildren(oldTriple.getSubject(), (Element) node);
}
// Predicate xml:textContent
} else if (
oldTriple.getPredicate().equals(XML.textContent.asNode())
&& !update.getAddedGraph().contains(oldTriple.getSubject(), XML.textContent.asNode(), Node.ANY)
&& !update.getAddedGraph().contains(oldTriple.getSubject(), XML.childrenOrderedBy.asNode(), Node.ANY) ) {
logger.trace("Managing delete textContent (" + oldTriple + ") for domSubjs " + domSubjs);
while (domSubjIter.hasNext()) {
org.w3c.dom.Node node = domSubjIter.next();
if (node instanceof Element)
setupElementChildren(oldTriple.getSubject(), (Element) node);
}
} else if ( oldTriple.getPredicate().equals(XML.listenedEventType.asNode()) ) {
Node eventTypeNode = oldTriple.getObject();
if (eventTypeNode.isLiteral()) {
while (domSubjIter.hasNext()) {
Element element = (Element) domSubjIter.next();
// System.out.println("Registering eventListener for type " + eventTypeNode.getLiteralLexicalForm() + " in element " + element + " (" + elementNode + ")");
// ((EventTarget) element).removeEventListener(eventTypeNode.getLiteralLexicalForm(), DomDecoder2.this, false);
eventManager.removeEventListener(
oldTriple.getSubject(),
element,
eventTypeNode.getLiteralLexicalForm(),
DomDecoder.this, false);
// Set<Element> elemsForEventType = eventType2elements.get(eventType);
// elemsForEventType.remove(element);
// if (elemsForEventType.isEmpty()) {
// eventType2elements.remove(eventType);
// ((EventTarget) document).removeEventListener(eventTypeNode.getLiteralLexicalForm(), DomDecoder2.this, false);
// }
//
// Set<String> eventTypesForElement = element2eventTypes.get(element);
// eventTypesForElement.remove(eventType);
// if (eventTypesForElement.isEmpty()) {
// element2eventTypes.remove(element);
// }
}
}
}
// Predicate is the childrenOrderedBy for some parent
while (domSubjIter.hasNext()) {
org.w3c.dom.Node node = domSubjIter.next();
org.w3c.dom.Node parent = node.getParentNode();
if ( parent != null
&& parent instanceof Element ) {
Node childrenOrderProperty = dom2childrenOrderProperty.get((Element) parent);
if ( childrenOrderProperty != null
&& childrenOrderProperty.equals(oldTriple.getPredicate())
&& !update.getAddedGraph().contains(oldTriple.getSubject(), childrenOrderProperty, Node.ANY)
&& !mustReorderAllChildrenOf((Element) parent, update) ) {
logger.trace("Managing delete predicate that is the childrenOrderedBy for some parent (" + oldTriple + ")");
reorderChild(node, (Element) parent, null);
}
}
}
}
// System.out.println("End of notifyEvents() in " + this);
}
dom2graphNodeMapping = newDom.dom2graphNodeMapping;
graph2domNodeMapping = newDom.graph2domNodeMapping;
// document = newDom.document;
}
});
}
logger.debug("End of Notify Update in DOM Decoder");
}
|
diff --git a/carrot2/applications/carrot2-demo-browser/src/org/carrot2/demo/settings/LuceneSettingsDialog.java b/carrot2/applications/carrot2-demo-browser/src/org/carrot2/demo/settings/LuceneSettingsDialog.java
index 99b70a8c7..06e8a5eb2 100644
--- a/carrot2/applications/carrot2-demo-browser/src/org/carrot2/demo/settings/LuceneSettingsDialog.java
+++ b/carrot2/applications/carrot2-demo-browser/src/org/carrot2/demo/settings/LuceneSettingsDialog.java
@@ -1,200 +1,200 @@
/*
* Carrot2 project.
*
* Copyright (C) 2002-2007, Dawid Weiss, Stanisław Osiński.
* Portions (C) Contributors listed in "carrot2.CONTRIBUTORS" file.
* All rights reserved.
*
* Refer to the full license file "carrot2.LICENSE"
* in the root folder of the repository checkout or at:
* http://www.carrot2.org/carrot2.LICENSE
*/
package org.carrot2.demo.settings;
import java.awt.BorderLayout;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.io.File;
import java.io.IOException;
import java.util.Arrays;
import java.util.Collection;
import javax.swing.*;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.SimpleAnalyzer;
import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexReader.FieldOption;
import org.carrot2.demo.swing.SwingUtils;
import org.carrot2.input.lucene.StandardAnalyzerWithPorterStemmer;
import com.jgoodies.forms.builder.DefaultFormBuilder;
import com.jgoodies.forms.layout.FormLayout;
/**
* Visual component for {@link LuceneSettings}.
*
* @author Dawid Weiss
*/
public class LuceneSettingsDialog extends JPanel {
private final transient LuceneSettings settings;
private transient JTextField indexLocationLabel;
public LuceneSettingsDialog(LuceneSettings settings) {
this.settings = settings;
buildGui();
}
private void buildGui() {
this.setLayout(new BorderLayout());
final DefaultFormBuilder builder =
new DefaultFormBuilder(new FormLayout("fill:200px:grow, 4dlu, pref"));
builder.appendSeparator("Lucene index location");
this.indexLocationLabel = new JTextField();
this.indexLocationLabel.setEditable(false);
if (settings.luceneIndexDir != null) {
this.indexLocationLabel.setText(
settings.luceneIndexDir.getAbsolutePath());
}
final JButton indexLocationEditButton = new JButton();
indexLocationEditButton.setText("Edit");
indexLocationEditButton.addActionListener(
new ActionListener() {
public void actionPerformed(ActionEvent event) {
final JFileChooser chooser = new JFileChooser();
chooser.setCurrentDirectory(new File(indexLocationLabel.getText()));
chooser.setDialogType(JFileChooser.OPEN_DIALOG);
chooser.setFileSelectionMode(JFileChooser.DIRECTORIES_ONLY);
chooser.setMultiSelectionEnabled(false);
if (chooser.showOpenDialog(indexLocationEditButton) == JFileChooser.APPROVE_OPTION) {
final File luceneIndexDir = chooser.getSelectedFile();
// verify if it's lucene's index.
final Collection fields;
try {
IndexReader reader = IndexReader.open(luceneIndexDir);
fields = reader.getFieldNames(FieldOption.ALL);
reader.close();
} catch (IOException e) {
SwingUtils.showExceptionDialog(indexLocationEditButton,
"Could not open Lucene index.", e);
return;
}
// show details dialog.
final DefaultFormBuilder builder =
new DefaultFormBuilder(new FormLayout("pref:grow"));
builder.appendSeparator("Search fields");
final Object [] fieldsList = fields.toArray();
final JList list = new JList(fieldsList);
list.setSelectionMode(ListSelectionModel.MULTIPLE_INTERVAL_SELECTION);
if (settings.searchFields != null) {
for (int i = 0; i < settings.searchFields.length; i++) {
final String key = settings.searchFields[i];
for (int j = 0; j < fieldsList.length; j++) {
if (fieldsList[j].equals(key)) {
list.addSelectionInterval(j, j);
}
}
}
}
- builder.append(list);
+ builder.append(new JScrollPane(list));
builder.nextLine();
builder.appendSeparator("Results fields");
builder.append(new JLabel("URL:"));
final JComboBox url = new JComboBox(fields.toArray());
if (settings.urlField != null) {
url.setSelectedItem(settings.urlField);
}
builder.append(url);
builder.nextLine();
builder.append(new JLabel("Title:"));
final JComboBox title = new JComboBox(fields.toArray());
if (settings.titleField != null) {
title.setSelectedItem(settings.titleField);
}
builder.append(title);
builder.nextLine();
builder.append(new JLabel("Snippet:"));
final JComboBox snippet = new JComboBox(fields.toArray());
if (settings.summaryField != null) {
snippet.setSelectedItem(settings.summaryField);
}
builder.append(snippet);
builder.nextLine();
builder.appendSeparator("Analyzer");
final JComboBox analyzers = new JComboBox(new Object [] {
StandardAnalyzerWithPorterStemmer.class.getName(),
StandardAnalyzer.class.getName(),
SimpleAnalyzer.class.getName(),
jeasy.analysis.MMAnalyzer.class.getName()
});
if (settings.analyzer != null) {
analyzers.setSelectedItem(settings.analyzer.getClass().getName());
}
builder.append(analyzers);
builder.nextLine();
builder.appendSeparator("Results postprocessing");
final JCheckBox createSnippets = new JCheckBox("Create snippets");
createSnippets.setSelected(settings.createSnippets);
builder.append(createSnippets);
builder.nextLine();
final int result = JOptionPane.showConfirmDialog(indexLocationEditButton, builder.getPanel(), "Select fields",
JOptionPane.OK_CANCEL_OPTION, JOptionPane.PLAIN_MESSAGE);
if (result == JOptionPane.OK_OPTION) {
indexLocationLabel.setText(luceneIndexDir.getAbsolutePath());
final String [] searchFields = (String []) Arrays.asList(
list.getSelectedValues()).toArray(
new String [list.getSelectedIndices().length]);
if (searchFields.length == 0) {
JOptionPane.showMessageDialog(indexLocationEditButton, "At least one search field is required.");
return;
}
final Analyzer analyzer;
try {
analyzer = (Analyzer) Thread.currentThread().getContextClassLoader().loadClass(
(String) analyzers.getSelectedItem()).newInstance();
} catch (Exception e) {
throw new RuntimeException(e);
}
settings.setConfig(
luceneIndexDir,
searchFields,
(String) url.getSelectedItem(),
(String) title.getSelectedItem(),
(String) snippet.getSelectedItem(),
analyzer, createSnippets.isSelected());
}
}
}
}
);
builder.append(indexLocationLabel);
builder.append(indexLocationEditButton);
builder.nextLine();
this.add(builder.getPanel(), BorderLayout.CENTER);
}
}
| true | true | private void buildGui() {
this.setLayout(new BorderLayout());
final DefaultFormBuilder builder =
new DefaultFormBuilder(new FormLayout("fill:200px:grow, 4dlu, pref"));
builder.appendSeparator("Lucene index location");
this.indexLocationLabel = new JTextField();
this.indexLocationLabel.setEditable(false);
if (settings.luceneIndexDir != null) {
this.indexLocationLabel.setText(
settings.luceneIndexDir.getAbsolutePath());
}
final JButton indexLocationEditButton = new JButton();
indexLocationEditButton.setText("Edit");
indexLocationEditButton.addActionListener(
new ActionListener() {
public void actionPerformed(ActionEvent event) {
final JFileChooser chooser = new JFileChooser();
chooser.setCurrentDirectory(new File(indexLocationLabel.getText()));
chooser.setDialogType(JFileChooser.OPEN_DIALOG);
chooser.setFileSelectionMode(JFileChooser.DIRECTORIES_ONLY);
chooser.setMultiSelectionEnabled(false);
if (chooser.showOpenDialog(indexLocationEditButton) == JFileChooser.APPROVE_OPTION) {
final File luceneIndexDir = chooser.getSelectedFile();
// verify if it's lucene's index.
final Collection fields;
try {
IndexReader reader = IndexReader.open(luceneIndexDir);
fields = reader.getFieldNames(FieldOption.ALL);
reader.close();
} catch (IOException e) {
SwingUtils.showExceptionDialog(indexLocationEditButton,
"Could not open Lucene index.", e);
return;
}
// show details dialog.
final DefaultFormBuilder builder =
new DefaultFormBuilder(new FormLayout("pref:grow"));
builder.appendSeparator("Search fields");
final Object [] fieldsList = fields.toArray();
final JList list = new JList(fieldsList);
list.setSelectionMode(ListSelectionModel.MULTIPLE_INTERVAL_SELECTION);
if (settings.searchFields != null) {
for (int i = 0; i < settings.searchFields.length; i++) {
final String key = settings.searchFields[i];
for (int j = 0; j < fieldsList.length; j++) {
if (fieldsList[j].equals(key)) {
list.addSelectionInterval(j, j);
}
}
}
}
builder.append(list);
builder.nextLine();
builder.appendSeparator("Results fields");
builder.append(new JLabel("URL:"));
final JComboBox url = new JComboBox(fields.toArray());
if (settings.urlField != null) {
url.setSelectedItem(settings.urlField);
}
builder.append(url);
builder.nextLine();
builder.append(new JLabel("Title:"));
final JComboBox title = new JComboBox(fields.toArray());
if (settings.titleField != null) {
title.setSelectedItem(settings.titleField);
}
builder.append(title);
builder.nextLine();
builder.append(new JLabel("Snippet:"));
final JComboBox snippet = new JComboBox(fields.toArray());
if (settings.summaryField != null) {
snippet.setSelectedItem(settings.summaryField);
}
builder.append(snippet);
builder.nextLine();
builder.appendSeparator("Analyzer");
final JComboBox analyzers = new JComboBox(new Object [] {
StandardAnalyzerWithPorterStemmer.class.getName(),
StandardAnalyzer.class.getName(),
SimpleAnalyzer.class.getName(),
jeasy.analysis.MMAnalyzer.class.getName()
});
if (settings.analyzer != null) {
analyzers.setSelectedItem(settings.analyzer.getClass().getName());
}
builder.append(analyzers);
builder.nextLine();
builder.appendSeparator("Results postprocessing");
final JCheckBox createSnippets = new JCheckBox("Create snippets");
createSnippets.setSelected(settings.createSnippets);
builder.append(createSnippets);
builder.nextLine();
final int result = JOptionPane.showConfirmDialog(indexLocationEditButton, builder.getPanel(), "Select fields",
JOptionPane.OK_CANCEL_OPTION, JOptionPane.PLAIN_MESSAGE);
if (result == JOptionPane.OK_OPTION) {
indexLocationLabel.setText(luceneIndexDir.getAbsolutePath());
final String [] searchFields = (String []) Arrays.asList(
list.getSelectedValues()).toArray(
new String [list.getSelectedIndices().length]);
if (searchFields.length == 0) {
JOptionPane.showMessageDialog(indexLocationEditButton, "At least one search field is required.");
return;
}
final Analyzer analyzer;
try {
analyzer = (Analyzer) Thread.currentThread().getContextClassLoader().loadClass(
(String) analyzers.getSelectedItem()).newInstance();
} catch (Exception e) {
throw new RuntimeException(e);
}
settings.setConfig(
luceneIndexDir,
searchFields,
(String) url.getSelectedItem(),
(String) title.getSelectedItem(),
(String) snippet.getSelectedItem(),
analyzer, createSnippets.isSelected());
}
}
}
}
);
builder.append(indexLocationLabel);
builder.append(indexLocationEditButton);
builder.nextLine();
this.add(builder.getPanel(), BorderLayout.CENTER);
}
| private void buildGui() {
this.setLayout(new BorderLayout());
final DefaultFormBuilder builder =
new DefaultFormBuilder(new FormLayout("fill:200px:grow, 4dlu, pref"));
builder.appendSeparator("Lucene index location");
this.indexLocationLabel = new JTextField();
this.indexLocationLabel.setEditable(false);
if (settings.luceneIndexDir != null) {
this.indexLocationLabel.setText(
settings.luceneIndexDir.getAbsolutePath());
}
final JButton indexLocationEditButton = new JButton();
indexLocationEditButton.setText("Edit");
indexLocationEditButton.addActionListener(
new ActionListener() {
public void actionPerformed(ActionEvent event) {
final JFileChooser chooser = new JFileChooser();
chooser.setCurrentDirectory(new File(indexLocationLabel.getText()));
chooser.setDialogType(JFileChooser.OPEN_DIALOG);
chooser.setFileSelectionMode(JFileChooser.DIRECTORIES_ONLY);
chooser.setMultiSelectionEnabled(false);
if (chooser.showOpenDialog(indexLocationEditButton) == JFileChooser.APPROVE_OPTION) {
final File luceneIndexDir = chooser.getSelectedFile();
// verify if it's lucene's index.
final Collection fields;
try {
IndexReader reader = IndexReader.open(luceneIndexDir);
fields = reader.getFieldNames(FieldOption.ALL);
reader.close();
} catch (IOException e) {
SwingUtils.showExceptionDialog(indexLocationEditButton,
"Could not open Lucene index.", e);
return;
}
// show details dialog.
final DefaultFormBuilder builder =
new DefaultFormBuilder(new FormLayout("pref:grow"));
builder.appendSeparator("Search fields");
final Object [] fieldsList = fields.toArray();
final JList list = new JList(fieldsList);
list.setSelectionMode(ListSelectionModel.MULTIPLE_INTERVAL_SELECTION);
if (settings.searchFields != null) {
for (int i = 0; i < settings.searchFields.length; i++) {
final String key = settings.searchFields[i];
for (int j = 0; j < fieldsList.length; j++) {
if (fieldsList[j].equals(key)) {
list.addSelectionInterval(j, j);
}
}
}
}
builder.append(new JScrollPane(list));
builder.nextLine();
builder.appendSeparator("Results fields");
builder.append(new JLabel("URL:"));
final JComboBox url = new JComboBox(fields.toArray());
if (settings.urlField != null) {
url.setSelectedItem(settings.urlField);
}
builder.append(url);
builder.nextLine();
builder.append(new JLabel("Title:"));
final JComboBox title = new JComboBox(fields.toArray());
if (settings.titleField != null) {
title.setSelectedItem(settings.titleField);
}
builder.append(title);
builder.nextLine();
builder.append(new JLabel("Snippet:"));
final JComboBox snippet = new JComboBox(fields.toArray());
if (settings.summaryField != null) {
snippet.setSelectedItem(settings.summaryField);
}
builder.append(snippet);
builder.nextLine();
builder.appendSeparator("Analyzer");
final JComboBox analyzers = new JComboBox(new Object [] {
StandardAnalyzerWithPorterStemmer.class.getName(),
StandardAnalyzer.class.getName(),
SimpleAnalyzer.class.getName(),
jeasy.analysis.MMAnalyzer.class.getName()
});
if (settings.analyzer != null) {
analyzers.setSelectedItem(settings.analyzer.getClass().getName());
}
builder.append(analyzers);
builder.nextLine();
builder.appendSeparator("Results postprocessing");
final JCheckBox createSnippets = new JCheckBox("Create snippets");
createSnippets.setSelected(settings.createSnippets);
builder.append(createSnippets);
builder.nextLine();
final int result = JOptionPane.showConfirmDialog(indexLocationEditButton, builder.getPanel(), "Select fields",
JOptionPane.OK_CANCEL_OPTION, JOptionPane.PLAIN_MESSAGE);
if (result == JOptionPane.OK_OPTION) {
indexLocationLabel.setText(luceneIndexDir.getAbsolutePath());
final String [] searchFields = (String []) Arrays.asList(
list.getSelectedValues()).toArray(
new String [list.getSelectedIndices().length]);
if (searchFields.length == 0) {
JOptionPane.showMessageDialog(indexLocationEditButton, "At least one search field is required.");
return;
}
final Analyzer analyzer;
try {
analyzer = (Analyzer) Thread.currentThread().getContextClassLoader().loadClass(
(String) analyzers.getSelectedItem()).newInstance();
} catch (Exception e) {
throw new RuntimeException(e);
}
settings.setConfig(
luceneIndexDir,
searchFields,
(String) url.getSelectedItem(),
(String) title.getSelectedItem(),
(String) snippet.getSelectedItem(),
analyzer, createSnippets.isSelected());
}
}
}
}
);
builder.append(indexLocationLabel);
builder.append(indexLocationEditButton);
builder.nextLine();
this.add(builder.getPanel(), BorderLayout.CENTER);
}
|
diff --git a/swing-application-impl/src/main/java/org/cytoscape/internal/view/TreeCellRenderer.java b/swing-application-impl/src/main/java/org/cytoscape/internal/view/TreeCellRenderer.java
index d0ab0df52..3e9e9ebbb 100644
--- a/swing-application-impl/src/main/java/org/cytoscape/internal/view/TreeCellRenderer.java
+++ b/swing-application-impl/src/main/java/org/cytoscape/internal/view/TreeCellRenderer.java
@@ -1,83 +1,88 @@
package org.cytoscape.internal.view;
import java.awt.Color;
import java.awt.Component;
import java.awt.Dimension;
import java.awt.Font;
import java.awt.Image;
import java.awt.Toolkit;
import javax.swing.ImageIcon;
import javax.swing.JTree;
import javax.swing.tree.DefaultTreeCellRenderer;
import org.cytoscape.model.CyNetwork;
import org.cytoscape.util.swing.JTreeTable;
final class TreeCellRenderer extends DefaultTreeCellRenderer {
private final static long serialVersionUID = 1213748836751014L;
private static final String NETWORK_ICON = "/images/network_16.png";
private static final String NETWORK_LEAF_ICON = "/images/blank_icon_16.png";
private static final Font TABLE_FONT = new Font("SansSerif", Font.PLAIN, 12);
private static final Font TABLE_FONT_SELECTED = new Font("SansSerif", Font.BOLD, 12);
private static final Dimension CELL_SIZE = new Dimension(1200, 40);
private final JTreeTable treeTable;
TreeCellRenderer(final JTreeTable treeTable) {
this.treeTable = treeTable;
final Image iconImage = Toolkit.getDefaultToolkit().getImage(getClass().getResource(NETWORK_ICON));
final ImageIcon icon = new ImageIcon(iconImage);
// If we don't provide a leaf Icon, a default one will be used.
final Image iconImageLeaf = Toolkit.getDefaultToolkit().getImage(getClass().getResource(NETWORK_LEAF_ICON));
final ImageIcon iconLeaf = new ImageIcon(iconImageLeaf);
this.setClosedIcon(icon);
this.setOpenIcon(icon);
this.setLeafIcon(iconLeaf);
}
@Override
public Component getTreeCellRendererComponent(JTree tree, Object value, boolean sel, boolean expanded,
boolean leaf, int row, boolean hasFocus) {
super.getTreeCellRendererComponent(tree, value, sel, expanded, leaf, row, hasFocus);
this.setPreferredSize(CELL_SIZE);
this.setSize(CELL_SIZE);
if (value instanceof NetworkTreeNode == false)
return this;
final NetworkTreeNode treeNode = (NetworkTreeNode) value;
this.setForeground(NetworkPanel.FONT_COLOR);
treeTable.setForeground(NetworkPanel.FONT_COLOR);
this.setBackground(treeTable.getBackground());
this.setBackgroundSelectionColor(treeTable.getSelectionBackground());
if (selected) {
this.setFont(TABLE_FONT_SELECTED);
this.setBackgroundSelectionColor(Color.lightGray);
}
else {
this.setFont(TABLE_FONT);
this.setBackgroundNonSelectionColor(Color.white);
}
if(treeNode.getNetwork() == null) {
setForeground(treeTable.getForeground());
return this;
}
setForeground(treeNode.getNodeColor());
- setToolTipText(treeNode.getNetwork().getRow(treeNode.getNetwork()).get(CyNetwork.NAME, String.class));
+ try {
+ setToolTipText(treeNode.getNetwork().getRow(treeNode.getNetwork()).get(CyNetwork.NAME, String.class));
+ } catch (NullPointerException e) {
+ // It's possible that the network got deleted but we haven't been
+ // notified yet.
+ }
return this;
}
}
| true | true | public Component getTreeCellRendererComponent(JTree tree, Object value, boolean sel, boolean expanded,
boolean leaf, int row, boolean hasFocus) {
super.getTreeCellRendererComponent(tree, value, sel, expanded, leaf, row, hasFocus);
this.setPreferredSize(CELL_SIZE);
this.setSize(CELL_SIZE);
if (value instanceof NetworkTreeNode == false)
return this;
final NetworkTreeNode treeNode = (NetworkTreeNode) value;
this.setForeground(NetworkPanel.FONT_COLOR);
treeTable.setForeground(NetworkPanel.FONT_COLOR);
this.setBackground(treeTable.getBackground());
this.setBackgroundSelectionColor(treeTable.getSelectionBackground());
if (selected) {
this.setFont(TABLE_FONT_SELECTED);
this.setBackgroundSelectionColor(Color.lightGray);
}
else {
this.setFont(TABLE_FONT);
this.setBackgroundNonSelectionColor(Color.white);
}
if(treeNode.getNetwork() == null) {
setForeground(treeTable.getForeground());
return this;
}
setForeground(treeNode.getNodeColor());
setToolTipText(treeNode.getNetwork().getRow(treeNode.getNetwork()).get(CyNetwork.NAME, String.class));
return this;
}
| public Component getTreeCellRendererComponent(JTree tree, Object value, boolean sel, boolean expanded,
boolean leaf, int row, boolean hasFocus) {
super.getTreeCellRendererComponent(tree, value, sel, expanded, leaf, row, hasFocus);
this.setPreferredSize(CELL_SIZE);
this.setSize(CELL_SIZE);
if (value instanceof NetworkTreeNode == false)
return this;
final NetworkTreeNode treeNode = (NetworkTreeNode) value;
this.setForeground(NetworkPanel.FONT_COLOR);
treeTable.setForeground(NetworkPanel.FONT_COLOR);
this.setBackground(treeTable.getBackground());
this.setBackgroundSelectionColor(treeTable.getSelectionBackground());
if (selected) {
this.setFont(TABLE_FONT_SELECTED);
this.setBackgroundSelectionColor(Color.lightGray);
}
else {
this.setFont(TABLE_FONT);
this.setBackgroundNonSelectionColor(Color.white);
}
if(treeNode.getNetwork() == null) {
setForeground(treeTable.getForeground());
return this;
}
setForeground(treeNode.getNodeColor());
try {
setToolTipText(treeNode.getNetwork().getRow(treeNode.getNetwork()).get(CyNetwork.NAME, String.class));
} catch (NullPointerException e) {
// It's possible that the network got deleted but we haven't been
// notified yet.
}
return this;
}
|
diff --git a/modules/cpr/src/main/java/org/atmosphere/interceptor/OnDisconnectInterceptor.java b/modules/cpr/src/main/java/org/atmosphere/interceptor/OnDisconnectInterceptor.java
index c1d0885d8..731b3ffb3 100644
--- a/modules/cpr/src/main/java/org/atmosphere/interceptor/OnDisconnectInterceptor.java
+++ b/modules/cpr/src/main/java/org/atmosphere/interceptor/OnDisconnectInterceptor.java
@@ -1,75 +1,75 @@
/*
* Copyright 2013 Jeanfrancois Arcand
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package org.atmosphere.interceptor;
import org.atmosphere.cpr.Action;
import org.atmosphere.cpr.AsynchronousProcessor;
import org.atmosphere.cpr.AtmosphereConfig;
import org.atmosphere.cpr.AtmosphereInterceptorAdapter;
import org.atmosphere.cpr.AtmosphereRequest;
import org.atmosphere.cpr.AtmosphereResource;
import org.atmosphere.cpr.AtmosphereResourceEventImpl;
import org.atmosphere.cpr.AtmosphereResourceFactory;
import org.atmosphere.cpr.AtmosphereResourceImpl;
import org.atmosphere.cpr.HeaderConfig;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import static org.atmosphere.cpr.FrameworkConfig.ASYNCHRONOUS_HOOK;
/**
* When the browser close the connection, the atmosphere.js will send an unsubscribe message to tell
* framework the browser is disconnecting.
*
* @author Jeanfrancois Arcand
*/
public class OnDisconnectInterceptor extends AtmosphereInterceptorAdapter {
private final Logger logger = LoggerFactory.getLogger(OnDisconnectInterceptor.class);
private AsynchronousProcessor p;
@Override
public void configure(AtmosphereConfig config) {
if (AsynchronousProcessor.class.isAssignableFrom(config.framework().getAsyncSupport().getClass())) {
p = AsynchronousProcessor.class.cast(config.framework().getAsyncSupport());
}
}
@Override
public Action inspect(final AtmosphereResource r) {
AtmosphereRequest request = AtmosphereResourceImpl.class.cast(r).getRequest(false);
String s = request.getHeader(HeaderConfig.X_ATMOSPHERE_TRANSPORT);
String uuid = request.getHeader(HeaderConfig.X_ATMOSPHERE_TRACKING_ID);
if (p != null && s != null && uuid != null && s.equalsIgnoreCase(HeaderConfig.DISCONNECT)) {
logger.trace("AtmosphereResource {} disconnected", uuid);
AtmosphereResource ss = AtmosphereResourceFactory.getDefault().find(uuid);
if (ss != null) {
// Block websocket closing detection
- ss.getRequest().setAttribute(ASYNCHRONOUS_HOOK, null);
+ AtmosphereResourceImpl.class.cast(ss).getRequest(false).setAttribute(ASYNCHRONOUS_HOOK, null);
AtmosphereResourceEventImpl.class.cast(ss.getAtmosphereResourceEvent()).isClosedByClient(true);
p.completeLifecycle(ss, false);
}
return Action.CANCELLED;
}
return Action.CONTINUE;
}
public String toString() {
return "Browser disconnection detection";
}
}
| true | true | public Action inspect(final AtmosphereResource r) {
AtmosphereRequest request = AtmosphereResourceImpl.class.cast(r).getRequest(false);
String s = request.getHeader(HeaderConfig.X_ATMOSPHERE_TRANSPORT);
String uuid = request.getHeader(HeaderConfig.X_ATMOSPHERE_TRACKING_ID);
if (p != null && s != null && uuid != null && s.equalsIgnoreCase(HeaderConfig.DISCONNECT)) {
logger.trace("AtmosphereResource {} disconnected", uuid);
AtmosphereResource ss = AtmosphereResourceFactory.getDefault().find(uuid);
if (ss != null) {
// Block websocket closing detection
ss.getRequest().setAttribute(ASYNCHRONOUS_HOOK, null);
AtmosphereResourceEventImpl.class.cast(ss.getAtmosphereResourceEvent()).isClosedByClient(true);
p.completeLifecycle(ss, false);
}
return Action.CANCELLED;
}
return Action.CONTINUE;
}
| public Action inspect(final AtmosphereResource r) {
AtmosphereRequest request = AtmosphereResourceImpl.class.cast(r).getRequest(false);
String s = request.getHeader(HeaderConfig.X_ATMOSPHERE_TRANSPORT);
String uuid = request.getHeader(HeaderConfig.X_ATMOSPHERE_TRACKING_ID);
if (p != null && s != null && uuid != null && s.equalsIgnoreCase(HeaderConfig.DISCONNECT)) {
logger.trace("AtmosphereResource {} disconnected", uuid);
AtmosphereResource ss = AtmosphereResourceFactory.getDefault().find(uuid);
if (ss != null) {
// Block websocket closing detection
AtmosphereResourceImpl.class.cast(ss).getRequest(false).setAttribute(ASYNCHRONOUS_HOOK, null);
AtmosphereResourceEventImpl.class.cast(ss.getAtmosphereResourceEvent()).isClosedByClient(true);
p.completeLifecycle(ss, false);
}
return Action.CANCELLED;
}
return Action.CONTINUE;
}
|
diff --git a/Slick/src/org/newdawn/slick/util/pathfinding/AStarPathFinder.java b/Slick/src/org/newdawn/slick/util/pathfinding/AStarPathFinder.java
index e1442ad..4bbcae0 100644
--- a/Slick/src/org/newdawn/slick/util/pathfinding/AStarPathFinder.java
+++ b/Slick/src/org/newdawn/slick/util/pathfinding/AStarPathFinder.java
@@ -1,568 +1,568 @@
package org.newdawn.slick.util.pathfinding;
import java.util.ArrayList;
import java.util.LinkedList;
import java.util.List;
import org.newdawn.slick.util.pathfinding.heuristics.ClosestHeuristic;
/**
* A path finder implementation that uses the AStar heuristic based algorithm
* to determine a path.
*
* @author Kevin Glass
*/
public class AStarPathFinder implements PathFinder, PathFindingContext {
/** The set of nodes that have been searched through */
private ArrayList closed = new ArrayList();
/** The set of nodes that we do not yet consider fully searched */
private PriorityList open = new PriorityList();
/** The map being searched */
private TileBasedMap map;
/** The maximum depth of search we're willing to accept before giving up */
private int maxSearchDistance;
/** The complete set of nodes across the map */
private Node[][] nodes;
/** True if we allow diaganol movement */
private boolean allowDiagMovement;
/** The heuristic we're applying to determine which nodes to search first */
private AStarHeuristic heuristic;
/** The node we're currently searching from */
private Node current;
/** The mover going through the path */
private Mover mover;
/** The x coordinate of the source tile we're moving from */
private int sourceX;
/** The y coordinate of the source tile we're moving from */
private int sourceY;
/** The distance searched so far */
private int distance;
/**
* Create a path finder with the default heuristic - closest to target.
*
* @param map The map to be searched
* @param maxSearchDistance The maximum depth we'll search before giving up
* @param allowDiagMovement True if the search should try diaganol movement
*/
public AStarPathFinder(TileBasedMap map, int maxSearchDistance, boolean allowDiagMovement) {
this(map, maxSearchDistance, allowDiagMovement, new ClosestHeuristic());
}
/**
* Create a path finder
*
* @param heuristic The heuristic used to determine the search order of the map
* @param map The map to be searched
* @param maxSearchDistance The maximum depth we'll search before giving up
* @param allowDiagMovement True if the search should try diaganol movement
*/
public AStarPathFinder(TileBasedMap map, int maxSearchDistance,
boolean allowDiagMovement, AStarHeuristic heuristic) {
this.heuristic = heuristic;
this.map = map;
this.maxSearchDistance = maxSearchDistance;
this.allowDiagMovement = allowDiagMovement;
nodes = new Node[map.getWidthInTiles()][map.getHeightInTiles()];
for (int x=0;x<map.getWidthInTiles();x++) {
for (int y=0;y<map.getHeightInTiles();y++) {
nodes[x][y] = new Node(x,y);
}
}
}
/**
* @see PathFinder#findPath(Mover, int, int, int, int)
*/
public Path findPath(Mover mover, int sx, int sy, int tx, int ty) {
current = null;
// easy first check, if the destination is blocked, we can't get there
this.mover = mover;
- this.sourceX = sx;
- this.sourceY = sy;
+ this.sourceX = tx;
+ this.sourceY = ty;
this.distance = 0;
if (map.blocked(this, tx, ty)) {
return null;
}
for (int x=0;x<map.getWidthInTiles();x++) {
for (int y=0;y<map.getHeightInTiles();y++) {
nodes[x][y].reset();
}
}
// initial state for A*. The closed group is empty. Only the starting
// tile is in the open list and it's cost is zero, i.e. we're already there
nodes[sx][sy].cost = 0;
nodes[sx][sy].depth = 0;
closed.clear();
open.clear();
addToOpen(nodes[sx][sy]);
nodes[tx][ty].parent = null;
// while we haven't found the goal and haven't exceeded our max search depth
int maxDepth = 0;
while ((maxDepth < maxSearchDistance) && (open.size() != 0)) {
// pull out the first node in our open list, this is determined to
// be the most likely to be the next step based on our heuristic
current = getFirstInOpen();
if (current == nodes[tx][ty]) {
break;
}
distance = current.depth;
removeFromOpen(current);
addToClosed(current);
// search through all the neighbours of the current node evaluating
// them as next steps
for (int x=-1;x<2;x++) {
for (int y=-1;y<2;y++) {
// not a neighbour, its the current tile
if ((x == 0) && (y == 0)) {
continue;
}
// if we're not allowing diaganol movement then only
// one of x or y can be set
if (!allowDiagMovement) {
if ((x != 0) && (y != 0)) {
continue;
}
}
// determine the location of the neighbour and evaluate it
int xp = x + current.x;
int yp = y + current.y;
- if (isValidLocation(mover,sx,sy,xp,yp)) {
+ if (isValidLocation(mover,current.x,current.y,xp,yp)) {
// the cost to get to this node is cost the current plus the movement
// cost to reach this node. Note that the heursitic value is only used
// in the sorted open list
float nextStepCost = current.cost + getMovementCost(mover, current.x, current.y, xp, yp);
Node neighbour = nodes[xp][yp];
map.pathFinderVisited(xp, yp);
// if the new cost we've determined for this node is lower than
// it has been previously makes sure the node hasn't been discarded. We've
// determined that there might have been a better path to get to
// this node so it needs to be re-evaluated
if (nextStepCost < neighbour.cost) {
if (inOpenList(neighbour)) {
removeFromOpen(neighbour);
}
if (inClosedList(neighbour)) {
removeFromClosed(neighbour);
}
}
// if the node hasn't already been processed and discarded then
// reset it's cost to our current cost and add it as a next possible
// step (i.e. to the open list)
if (!inOpenList(neighbour) && !(inClosedList(neighbour))) {
neighbour.cost = nextStepCost;
neighbour.heuristic = getHeuristicCost(mover, xp, yp, tx, ty);
maxDepth = Math.max(maxDepth, neighbour.setParent(current));
addToOpen(neighbour);
}
}
}
}
}
// since we've got an empty open list or we've run out of search
// there was no path. Just return null
if (nodes[tx][ty].parent == null) {
return null;
}
// At this point we've definitely found a path so we can uses the parent
// references of the nodes to find out way from the target location back
// to the start recording the nodes on the way.
Path path = new Path();
Node target = nodes[tx][ty];
while (target != nodes[sx][sy]) {
path.prependStep(target.x, target.y);
target = target.parent;
}
path.prependStep(sx,sy);
// thats it, we have our path
return path;
}
/**
* Get the X coordinate of the node currently being evaluated
*
* @return The X coordinate of the node currently being evaluated
*/
public int getCurrentX() {
if (current == null) {
return -1;
}
return current.x;
}
/**
* Get the Y coordinate of the node currently being evaluated
*
* @return The Y coordinate of the node currently being evaluated
*/
public int getCurrentY() {
if (current == null) {
return -1;
}
return current.y;
}
/**
* Get the first element from the open list. This is the next
* one to be searched.
*
* @return The first element in the open list
*/
protected Node getFirstInOpen() {
return (Node) open.first();
}
/**
* Add a node to the open list
*
* @param node The node to be added to the open list
*/
protected void addToOpen(Node node) {
node.setOpen(true);
open.add(node);
}
/**
* Check if a node is in the open list
*
* @param node The node to check for
* @return True if the node given is in the open list
*/
protected boolean inOpenList(Node node) {
return node.isOpen();
}
/**
* Remove a node from the open list
*
* @param node The node to remove from the open list
*/
protected void removeFromOpen(Node node) {
node.setOpen(false);
open.remove(node);
}
/**
* Add a node to the closed list
*
* @param node The node to add to the closed list
*/
protected void addToClosed(Node node) {
node.setClosed(true);
closed.add(node);
}
/**
* Check if the node supplied is in the closed list
*
* @param node The node to search for
* @return True if the node specified is in the closed list
*/
protected boolean inClosedList(Node node) {
return node.isClosed();
}
/**
* Remove a node from the closed list
*
* @param node The node to remove from the closed list
*/
protected void removeFromClosed(Node node) {
node.setClosed(false);
closed.remove(node);
}
/**
* Check if a given location is valid for the supplied mover
*
* @param mover The mover that would hold a given location
* @param sx The starting x coordinate
* @param sy The starting y coordinate
* @param x The x coordinate of the location to check
* @param y The y coordinate of the location to check
* @return True if the location is valid for the given mover
*/
protected boolean isValidLocation(Mover mover, int sx, int sy, int x, int y) {
boolean invalid = (x < 0) || (y < 0) || (x >= map.getWidthInTiles()) || (y >= map.getHeightInTiles());
if ((!invalid) && ((sx != x) || (sy != y))) {
this.mover = mover;
this.sourceX = sx;
this.sourceY = sy;
invalid = map.blocked(this, x, y);
}
return !invalid;
}
/**
* Get the cost to move through a given location
*
* @param mover The entity that is being moved
* @param sx The x coordinate of the tile whose cost is being determined
* @param sy The y coordiante of the tile whose cost is being determined
* @param tx The x coordinate of the target location
* @param ty The y coordinate of the target location
* @return The cost of movement through the given tile
*/
public float getMovementCost(Mover mover, int sx, int sy, int tx, int ty) {
this.mover = mover;
this.sourceX = sx;
this.sourceY = sy;
return map.getCost(this, tx, ty);
}
/**
* Get the heuristic cost for the given location. This determines in which
* order the locations are processed.
*
* @param mover The entity that is being moved
* @param x The x coordinate of the tile whose cost is being determined
* @param y The y coordiante of the tile whose cost is being determined
* @param tx The x coordinate of the target location
* @param ty The y coordinate of the target location
* @return The heuristic cost assigned to the tile
*/
public float getHeuristicCost(Mover mover, int x, int y, int tx, int ty) {
return heuristic.getCost(map, mover, x, y, tx, ty);
}
/**
* A list that sorts any element provided into the list
*
* @author kevin
*/
private class PriorityList {
/** The list of elements */
private List list = new LinkedList();
/**
* Retrieve the first element from the list
*
* @return The first element from the list
*/
public Object first() {
return list.get(0);
}
/**
* Empty the list
*/
public void clear() {
list.clear();
}
/**
* Add an element to the list - causes sorting
*
* @param o The element to add
*/
public void add(Object o) {
// float the new entry
for (int i=0;i<list.size();i++) {
if (((Comparable) list.get(i)).compareTo(o) > 0) {
list.add(i, o);
break;
}
}
if (!list.contains(o)) {
list.add(o);
}
//Collections.sort(list);
}
/**
* Remove an element from the list
*
* @param o The element to remove
*/
public void remove(Object o) {
list.remove(o);
}
/**
* Get the number of elements in the list
*
* @return The number of element in the list
*/
public int size() {
return list.size();
}
/**
* Check if an element is in the list
*
* @param o The element to search for
* @return True if the element is in the list
*/
public boolean contains(Object o) {
return list.contains(o);
}
public String toString() {
String temp = "{";
for (int i=0;i<size();i++) {
temp += list.get(i).toString()+",";
}
temp += "}";
return temp;
}
}
/**
* A single node in the search graph
*/
private class Node implements Comparable {
/** The x coordinate of the node */
private int x;
/** The y coordinate of the node */
private int y;
/** The path cost for this node */
private float cost;
/** The parent of this node, how we reached it in the search */
private Node parent;
/** The heuristic cost of this node */
private float heuristic;
/** The search depth of this node */
private int depth;
/** In the open list */
private boolean open;
/** In the closed list */
private boolean closed;
/**
* Create a new node
*
* @param x The x coordinate of the node
* @param y The y coordinate of the node
*/
public Node(int x, int y) {
this.x = x;
this.y = y;
}
/**
* Set the parent of this node
*
* @param parent The parent node which lead us to this node
* @return The depth we have no reached in searching
*/
public int setParent(Node parent) {
depth = parent.depth + 1;
this.parent = parent;
return depth;
}
/**
* @see Comparable#compareTo(Object)
*/
public int compareTo(Object other) {
Node o = (Node) other;
float f = heuristic + cost;
float of = o.heuristic + o.cost;
if (f < of) {
return -1;
} else if (f > of) {
return 1;
} else {
return 0;
}
}
/**
* Indicate whether the node is in the open list
*
* @param open True if the node is in the open list
*/
public void setOpen(boolean open) {
this.open = open;
}
/**
* Check if the node is in the open list
*
* @return True if the node is in the open list
*/
public boolean isOpen() {
return open;
}
/**
* Indicate whether the node is in the closed list
*
* @param closed True if the node is in the closed list
*/
public void setClosed(boolean closed) {
this.closed = closed;
}
/**
* Check if the node is in the closed list
*
* @return True if the node is in the closed list
*/
public boolean isClosed() {
return closed;
}
/**
* Reset the state of this node
*/
public void reset() {
closed = false;
open = false;
cost = 0;
depth = 0;
}
/**
* @see java.lang.Object#toString()
*/
public String toString() {
return "[Node "+x+","+y+"]";
}
}
public Mover getMover() {
return mover;
}
public int getSearchDistance() {
return distance;
}
public int getSourceX() {
return sourceX;
}
public int getSourceY() {
return sourceY;
}
}
| false | true | public Path findPath(Mover mover, int sx, int sy, int tx, int ty) {
current = null;
// easy first check, if the destination is blocked, we can't get there
this.mover = mover;
this.sourceX = sx;
this.sourceY = sy;
this.distance = 0;
if (map.blocked(this, tx, ty)) {
return null;
}
for (int x=0;x<map.getWidthInTiles();x++) {
for (int y=0;y<map.getHeightInTiles();y++) {
nodes[x][y].reset();
}
}
// initial state for A*. The closed group is empty. Only the starting
// tile is in the open list and it's cost is zero, i.e. we're already there
nodes[sx][sy].cost = 0;
nodes[sx][sy].depth = 0;
closed.clear();
open.clear();
addToOpen(nodes[sx][sy]);
nodes[tx][ty].parent = null;
// while we haven't found the goal and haven't exceeded our max search depth
int maxDepth = 0;
while ((maxDepth < maxSearchDistance) && (open.size() != 0)) {
// pull out the first node in our open list, this is determined to
// be the most likely to be the next step based on our heuristic
current = getFirstInOpen();
if (current == nodes[tx][ty]) {
break;
}
distance = current.depth;
removeFromOpen(current);
addToClosed(current);
// search through all the neighbours of the current node evaluating
// them as next steps
for (int x=-1;x<2;x++) {
for (int y=-1;y<2;y++) {
// not a neighbour, its the current tile
if ((x == 0) && (y == 0)) {
continue;
}
// if we're not allowing diaganol movement then only
// one of x or y can be set
if (!allowDiagMovement) {
if ((x != 0) && (y != 0)) {
continue;
}
}
// determine the location of the neighbour and evaluate it
int xp = x + current.x;
int yp = y + current.y;
if (isValidLocation(mover,sx,sy,xp,yp)) {
// the cost to get to this node is cost the current plus the movement
// cost to reach this node. Note that the heursitic value is only used
// in the sorted open list
float nextStepCost = current.cost + getMovementCost(mover, current.x, current.y, xp, yp);
Node neighbour = nodes[xp][yp];
map.pathFinderVisited(xp, yp);
// if the new cost we've determined for this node is lower than
// it has been previously makes sure the node hasn't been discarded. We've
// determined that there might have been a better path to get to
// this node so it needs to be re-evaluated
if (nextStepCost < neighbour.cost) {
if (inOpenList(neighbour)) {
removeFromOpen(neighbour);
}
if (inClosedList(neighbour)) {
removeFromClosed(neighbour);
}
}
// if the node hasn't already been processed and discarded then
// reset it's cost to our current cost and add it as a next possible
// step (i.e. to the open list)
if (!inOpenList(neighbour) && !(inClosedList(neighbour))) {
neighbour.cost = nextStepCost;
neighbour.heuristic = getHeuristicCost(mover, xp, yp, tx, ty);
maxDepth = Math.max(maxDepth, neighbour.setParent(current));
addToOpen(neighbour);
}
}
}
}
}
// since we've got an empty open list or we've run out of search
// there was no path. Just return null
if (nodes[tx][ty].parent == null) {
return null;
}
// At this point we've definitely found a path so we can uses the parent
// references of the nodes to find out way from the target location back
// to the start recording the nodes on the way.
Path path = new Path();
Node target = nodes[tx][ty];
while (target != nodes[sx][sy]) {
path.prependStep(target.x, target.y);
target = target.parent;
}
path.prependStep(sx,sy);
// thats it, we have our path
return path;
}
| public Path findPath(Mover mover, int sx, int sy, int tx, int ty) {
current = null;
// easy first check, if the destination is blocked, we can't get there
this.mover = mover;
this.sourceX = tx;
this.sourceY = ty;
this.distance = 0;
if (map.blocked(this, tx, ty)) {
return null;
}
for (int x=0;x<map.getWidthInTiles();x++) {
for (int y=0;y<map.getHeightInTiles();y++) {
nodes[x][y].reset();
}
}
// initial state for A*. The closed group is empty. Only the starting
// tile is in the open list and it's cost is zero, i.e. we're already there
nodes[sx][sy].cost = 0;
nodes[sx][sy].depth = 0;
closed.clear();
open.clear();
addToOpen(nodes[sx][sy]);
nodes[tx][ty].parent = null;
// while we haven't found the goal and haven't exceeded our max search depth
int maxDepth = 0;
while ((maxDepth < maxSearchDistance) && (open.size() != 0)) {
// pull out the first node in our open list, this is determined to
// be the most likely to be the next step based on our heuristic
current = getFirstInOpen();
if (current == nodes[tx][ty]) {
break;
}
distance = current.depth;
removeFromOpen(current);
addToClosed(current);
// search through all the neighbours of the current node evaluating
// them as next steps
for (int x=-1;x<2;x++) {
for (int y=-1;y<2;y++) {
// not a neighbour, its the current tile
if ((x == 0) && (y == 0)) {
continue;
}
// if we're not allowing diaganol movement then only
// one of x or y can be set
if (!allowDiagMovement) {
if ((x != 0) && (y != 0)) {
continue;
}
}
// determine the location of the neighbour and evaluate it
int xp = x + current.x;
int yp = y + current.y;
if (isValidLocation(mover,current.x,current.y,xp,yp)) {
// the cost to get to this node is cost the current plus the movement
// cost to reach this node. Note that the heursitic value is only used
// in the sorted open list
float nextStepCost = current.cost + getMovementCost(mover, current.x, current.y, xp, yp);
Node neighbour = nodes[xp][yp];
map.pathFinderVisited(xp, yp);
// if the new cost we've determined for this node is lower than
// it has been previously makes sure the node hasn't been discarded. We've
// determined that there might have been a better path to get to
// this node so it needs to be re-evaluated
if (nextStepCost < neighbour.cost) {
if (inOpenList(neighbour)) {
removeFromOpen(neighbour);
}
if (inClosedList(neighbour)) {
removeFromClosed(neighbour);
}
}
// if the node hasn't already been processed and discarded then
// reset it's cost to our current cost and add it as a next possible
// step (i.e. to the open list)
if (!inOpenList(neighbour) && !(inClosedList(neighbour))) {
neighbour.cost = nextStepCost;
neighbour.heuristic = getHeuristicCost(mover, xp, yp, tx, ty);
maxDepth = Math.max(maxDepth, neighbour.setParent(current));
addToOpen(neighbour);
}
}
}
}
}
// since we've got an empty open list or we've run out of search
// there was no path. Just return null
if (nodes[tx][ty].parent == null) {
return null;
}
// At this point we've definitely found a path so we can uses the parent
// references of the nodes to find out way from the target location back
// to the start recording the nodes on the way.
Path path = new Path();
Node target = nodes[tx][ty];
while (target != nodes[sx][sy]) {
path.prependStep(target.x, target.y);
target = target.parent;
}
path.prependStep(sx,sy);
// thats it, we have our path
return path;
}
|
diff --git a/deltaspike/core/impl/src/test/java/org/apache/deltaspike/test/core/api/provider/BeanManagerProviderTest.java b/deltaspike/core/impl/src/test/java/org/apache/deltaspike/test/core/api/provider/BeanManagerProviderTest.java
index aa8b9481..6d8b51fd 100644
--- a/deltaspike/core/impl/src/test/java/org/apache/deltaspike/test/core/api/provider/BeanManagerProviderTest.java
+++ b/deltaspike/core/impl/src/test/java/org/apache/deltaspike/test/core/api/provider/BeanManagerProviderTest.java
@@ -1,62 +1,61 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.deltaspike.test.core.api.provider;
import org.apache.deltaspike.core.api.provider.BeanManagerProvider;
import org.apache.deltaspike.test.core.api.temptestutil.ShrinkWrapArchiveUtil;
import org.jboss.arquillian.container.test.api.Deployment;
import org.jboss.arquillian.junit.Arquillian;
import org.jboss.shrinkwrap.api.ShrinkWrap;
import org.jboss.shrinkwrap.api.asset.EmptyAsset;
import org.jboss.shrinkwrap.api.spec.WebArchive;
import org.junit.Assert;
import org.junit.Test;
import org.junit.runner.RunWith;
import javax.enterprise.inject.spi.BeanManager;
@RunWith(Arquillian.class)
public class BeanManagerProviderTest
{
/**
*X TODO creating a WebArchive is only a workaround because JavaArchive cannot contain other archives.
*/
@Deployment
public static WebArchive deploy()
{
return ShrinkWrap.create(WebArchive.class)
.addAsLibraries(ShrinkWrapArchiveUtil.getArchives(null,
"META-INF/beans.xml",
new String[]{"org.apache.deltaspike"},
null))
- .addClass(TestBean.class)
- .addAsManifestResource(EmptyAsset.INSTANCE, "beans.xml");
+ .addAsWebInfResource(EmptyAsset.INSTANCE, "beans.xml");
}
@Test
public void testBeanManagerProvider() throws Exception
{
BeanManagerProvider bmp = BeanManagerProvider.getInstance();
Assert.assertNotNull(bmp);
BeanManager bm = bmp.getBeanManager();
Assert.assertNotNull(bm);
}
}
| true | true | public static WebArchive deploy()
{
return ShrinkWrap.create(WebArchive.class)
.addAsLibraries(ShrinkWrapArchiveUtil.getArchives(null,
"META-INF/beans.xml",
new String[]{"org.apache.deltaspike"},
null))
.addClass(TestBean.class)
.addAsManifestResource(EmptyAsset.INSTANCE, "beans.xml");
}
| public static WebArchive deploy()
{
return ShrinkWrap.create(WebArchive.class)
.addAsLibraries(ShrinkWrapArchiveUtil.getArchives(null,
"META-INF/beans.xml",
new String[]{"org.apache.deltaspike"},
null))
.addAsWebInfResource(EmptyAsset.INSTANCE, "beans.xml");
}
|
diff --git a/src/org/apache/xerces/validators/datatype/AbstractNumericFacetValidator.java b/src/org/apache/xerces/validators/datatype/AbstractNumericFacetValidator.java
index 5a3a3b0b..1b885fe3 100644
--- a/src/org/apache/xerces/validators/datatype/AbstractNumericFacetValidator.java
+++ b/src/org/apache/xerces/validators/datatype/AbstractNumericFacetValidator.java
@@ -1,441 +1,447 @@
/*
* The Apache Software License, Version 1.1
*
*
* Copyright (c) 1999, 2000, 2001 The Apache Software Foundation. All rights
* reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
*
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in
* the documentation and/or other materials provided with the
* distribution.
*
* 3. The end-user documentation included with the redistribution,
* if any, must include the following acknowledgment:
* "This product includes software developed by the
* Apache Software Foundation (http://www.apache.org/)."
* Alternately, this acknowledgment may appear in the software itself,
* if and wherever such third-party acknowledgments normally appear.
*
* 4. The names "Xerces" and "Apache Software Foundation" must
* not be used to endorse or promote products derived from this
* software without prior written permission. For written
* permission, please contact [email protected].
*
* 5. Products derived from this software may not be called "Apache",
* nor may "Apache" appear in their name, without prior written
* permission of the Apache Software Foundation.
*
* THIS SOFTWARE IS PROVIDED ``AS IS'' AND ANY EXPRESSED OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
* OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE APACHE SOFTWARE FOUNDATION OR
* ITS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF
* USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
* OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
* OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
* SUCH DAMAGE.
* ====================================================================
*
* This software consists of voluntary contributions made by many
* individuals on behalf of the Apache Software Foundation and was
* originally based on software copyright (c) 2001, International
* Business Machines, Inc., http://www.apache.org. For more
* information on the Apache Software Foundation, please see
* <http://www.apache.org/>.
*/
package org.apache.xerces.validators.datatype;
import java.util.Enumeration;
import java.util.Hashtable;
import java.util.Vector;
import java.io.IOException;
import org.apache.xerces.validators.schema.SchemaSymbols;
import org.apache.xerces.utils.regex.RegularExpression;
/**
* AbstractNumericFacetValidator is a base class for decimal, double, float,
* and all date/time datatype validators. It implements evaluation of common facets -
* minInclusive, maxInclusive, minExclusive, maxExclusive according to schema specs.
*
* @author Elena Litani
* @version $Id$
*/
public abstract class AbstractNumericFacetValidator extends AbstractDatatypeValidator {
protected Object[] fEnumeration = null;
protected Object fMaxInclusive = null;
protected Object fMaxExclusive = null;
protected Object fMinInclusive = null;
protected Object fMinExclusive = null;
public AbstractNumericFacetValidator () throws InvalidDatatypeFacetException {
this( null, null, false ); // Native, No Facets defined, Restriction
}
public AbstractNumericFacetValidator ( DatatypeValidator base,
Hashtable facets,
boolean derivedByList) throws InvalidDatatypeFacetException {
fBaseValidator = base;
// list types are handled by ListDatatypeValidator, we do nothing here.
if ( derivedByList )
return;
initializeValues();
// Set Facets if any defined
if ( facets != null ) {
Vector enumeration = null;
for ( Enumeration e = facets.keys(); e.hasMoreElements(); ) {
String key = (String) e.nextElement();
String value = null;
try {
if ( key.equals(SchemaSymbols.ELT_PATTERN) ) {
fFacetsDefined |= DatatypeValidator.FACET_PATTERN;
fPattern = (String) facets.get(key);
if ( fPattern != null )
fRegex = new RegularExpression(fPattern, "X" );
}
else if ( key.equals(SchemaSymbols.ELT_ENUMERATION) ) {
enumeration = (Vector)facets.get(key);
fFacetsDefined |= DatatypeValidator.FACET_ENUMERATION;
}
else if ( key.equals(SchemaSymbols.ELT_MAXINCLUSIVE) ) {
value = ((String) facets.get(key ));
fFacetsDefined |= DatatypeValidator.FACET_MAXINCLUSIVE;
setMaxInclusive(value);
}
else if ( key.equals(SchemaSymbols.ELT_MAXEXCLUSIVE) ) {
value = ((String) facets.get(key ));
fFacetsDefined |= DatatypeValidator.FACET_MAXEXCLUSIVE;
setMaxExclusive(value);
}
else if ( key.equals(SchemaSymbols.ELT_MININCLUSIVE) ) {
value = ((String) facets.get(key ));
fFacetsDefined |= DatatypeValidator.FACET_MININCLUSIVE;
setMinInclusive(value);
}
else if ( key.equals(SchemaSymbols.ELT_MINEXCLUSIVE) ) {
value = ((String) facets.get(key ));
fFacetsDefined |= DatatypeValidator.FACET_MINEXCLUSIVE;
setMinExclusive(value);
}
else if (key.equals(DatatypeValidator.FACET_FIXED)) {// fixed flags
fFlags = ((Short)facets.get(key)).shortValue();
}
else {
assignAdditionalFacets(key, facets);
}
}
catch ( Exception ex ) {
- throw new InvalidDatatypeFacetException( getErrorString( DatatypeMessageProvider.ILLEGAL_FACET_VALUE,
- DatatypeMessageProvider.MSG_NONE, new Object [] { value, key}));
+ if (value == null) {
+ //invalid facet error
+ throw new InvalidDatatypeFacetException( ex.getMessage());
+ }
+ else{
+ throw new InvalidDatatypeFacetException( (getErrorString (DatatypeMessageProvider.ILLEGAL_FACET_VALUE,
+ DatatypeMessageProvider.MSG_NONE, new Object [] { value, key})));
+ }
}
}
if ( fFacetsDefined != 0 ) {
// check 4.3.8.c1 error: maxInclusive + maxExclusive
if ( ((fFacetsDefined & DatatypeValidator.FACET_MAXEXCLUSIVE) != 0) &&
((fFacetsDefined & DatatypeValidator.FACET_MAXINCLUSIVE) != 0) ) {
throw new InvalidDatatypeFacetException( "It is an error for both maxInclusive and maxExclusive to be specified for the same datatype." );
}
// check 4.3.9.c1 error: minInclusive + minExclusive
if ( ((fFacetsDefined & DatatypeValidator.FACET_MINEXCLUSIVE) != 0) &&
((fFacetsDefined & DatatypeValidator.FACET_MININCLUSIVE) != 0) ) {
throw new InvalidDatatypeFacetException( "It is an error for both minInclusive and minExclusive to be specified for the same datatype." );
}
// check 4.3.7.c1 must: minInclusive <= maxInclusive
if ( ((fFacetsDefined & DatatypeValidator.FACET_MAXINCLUSIVE) != 0) &&
((fFacetsDefined & DatatypeValidator.FACET_MININCLUSIVE) != 0) ) {
if ( compareValues(fMinInclusive, fMaxInclusive) == 1 )
throw new InvalidDatatypeFacetException( "minInclusive value ='" + getMinInclusive(false) + "'must be <= maxInclusive value ='" +
getMaxInclusive(false) + "'. " );
}
// check 4.3.8.c2 must: minExclusive <= maxExclusive ??? minExclusive < maxExclusive
if ( ((fFacetsDefined & DatatypeValidator.FACET_MAXEXCLUSIVE) != 0) && ((fFacetsDefined & DatatypeValidator.FACET_MINEXCLUSIVE) != 0) ) {
if ( compareValues(fMinExclusive, fMaxExclusive) == 1 )
throw new InvalidDatatypeFacetException( "minExclusive value ='" + getMinExclusive(false) + "'must be <= maxExclusive value ='" +
getMaxExclusive(false) + "'. " );
}
// check 4.3.9.c2 must: minExclusive < maxInclusive
if ( ((fFacetsDefined & DatatypeValidator.FACET_MAXINCLUSIVE) != 0) && ((fFacetsDefined & DatatypeValidator.FACET_MINEXCLUSIVE) != 0) ) {
if ( compareValues(fMinExclusive, fMaxInclusive) != -1 )
throw new InvalidDatatypeFacetException( "minExclusive value ='" + getMinExclusive(false) + "'must be > maxInclusive value ='" +
getMaxInclusive(false) + "'. " );
}
// check 4.3.10.c1 must: minInclusive < maxExclusive
if ( ((fFacetsDefined & DatatypeValidator.FACET_MAXEXCLUSIVE) != 0) && ((fFacetsDefined & DatatypeValidator.FACET_MININCLUSIVE) != 0) ) {
if ( compareValues(fMinInclusive, fMaxExclusive) != -1 )
throw new InvalidDatatypeFacetException( "minInclusive value ='" + getMinInclusive(false) + "'must be < maxExclusive value ='" +
getMaxExclusive(false) + "'. " );
}
checkFacetConstraints();
}
if ( base != null ) {
AbstractNumericFacetValidator numBase = (AbstractNumericFacetValidator)base;
if ( fFacetsDefined != 0 ) {
// check 4.3.7.c2 error:
// maxInclusive > base.maxInclusive
// maxInclusive >= base.maxExclusive
// maxInclusive < base.minInclusive
// maxInclusive <= base.minExclusive
int result;
if ( ((fFacetsDefined & DatatypeValidator.FACET_MAXINCLUSIVE) != 0) ) {
if ( ((numBase.fFacetsDefined & DatatypeValidator.FACET_MAXINCLUSIVE) != 0)){
result = compareValues(fMaxInclusive, numBase.fMaxInclusive);
if ((numBase.fFlags & DatatypeValidator.FACET_MAXINCLUSIVE) != 0 &&
result != 0) {
throw new InvalidDatatypeFacetException( "maxInclusive value = '" + getMaxInclusive(false) +
"' must be equal to base.maxInclusive value = '" +
getMaxInclusive(true) + "' with attribute {fixed} = true" );
}
if ( result == 1 ){
throw new InvalidDatatypeFacetException( "maxInclusive value ='" + getMaxInclusive(false) + "' must be <= base.maxInclusive value ='" +
getMaxInclusive(true) + "'." );
}
}
if ( ((numBase.fFacetsDefined & DatatypeValidator.FACET_MAXEXCLUSIVE) != 0) &&
compareValues(fMaxInclusive, numBase.fMaxExclusive) != -1 )
throw new InvalidDatatypeFacetException(
"maxInclusive value ='" + getMaxInclusive(false) + "' must be < base.maxExclusive value ='" +
getMaxExclusive(true) + "'." );
if ( (( numBase.fFacetsDefined & DatatypeValidator.FACET_MININCLUSIVE) != 0) &&
compareValues(fMaxInclusive, numBase.fMinInclusive) == -1 )
throw new InvalidDatatypeFacetException( "maxInclusive value ='" + getMaxInclusive(false) + "' must be >= base.minInclusive value ='" +
getMinInclusive(true) + "'." );
if ( (( numBase.fFacetsDefined & DatatypeValidator.FACET_MINEXCLUSIVE) != 0) &&
compareValues(fMaxInclusive, numBase.fMinExclusive ) != 1 )
throw new InvalidDatatypeFacetException(
"maxInclusive value ='" + getMaxInclusive(false) + "' must be > base.minExclusive value ='" +
getMinExclusive(true) + "'." );
}
// check 4.3.8.c3 error:
// maxExclusive > base.maxExclusive
// maxExclusive > base.maxInclusive
// maxExclusive <= base.minInclusive
// maxExclusive <= base.minExclusive
if ( ((fFacetsDefined & DatatypeValidator.FACET_MAXEXCLUSIVE) != 0) ) {
if ( (( numBase.fFacetsDefined & DatatypeValidator.FACET_MAXEXCLUSIVE) != 0)){
result= compareValues(fMaxExclusive, numBase.fMaxExclusive);
if ((numBase.fFlags & DatatypeValidator.FACET_MAXEXCLUSIVE) != 0 &&
result != 0) {
throw new InvalidDatatypeFacetException( "maxExclusive value = '" + getMaxExclusive(false) +
"' must be equal to base.maxExclusive value = '" +
getMaxExclusive(true) + "' with attribute {fixed} = true" );
}
if (result == 1) {
throw new InvalidDatatypeFacetException( "maxExclusive value ='" + getMaxExclusive(false) + "' must be < base.maxExclusive value ='" +
getMaxExclusive(true) + "'." );
}
}
if ( (( numBase.fFacetsDefined & DatatypeValidator.FACET_MAXINCLUSIVE) != 0) &&
compareValues(fMaxExclusive, numBase.fMaxInclusive) == 1 )
throw new InvalidDatatypeFacetException( "maxExclusive value ='" + getMaxExclusive(false) + "' must be <= base.maxInclusive value ='" +
getMaxInclusive(true) + "'." );
if ( (( numBase.fFacetsDefined & DatatypeValidator.FACET_MINEXCLUSIVE) != 0) &&
compareValues(fMaxExclusive, numBase.fMinExclusive ) != 1 )
throw new InvalidDatatypeFacetException( "maxExclusive value ='" + getMaxExclusive(false) + "' must be > base.minExclusive value ='" +
getMinExclusive(true) + "'." );
if ( (( numBase.fFacetsDefined & DatatypeValidator.FACET_MININCLUSIVE) != 0) &&
compareValues(fMaxExclusive, numBase.fMinInclusive) != 1 )
throw new InvalidDatatypeFacetException( "maxExclusive value ='" + getMaxExclusive(false) + "' must be > base.minInclusive value ='" +
getMinInclusive(true) + "'." );
}
// check 4.3.9.c3 error:
// minExclusive < base.minExclusive
// minExclusive > base.maxInclusive ??? minExclusive >= base.maxInclusive
// minExclusive < base.minInclusive
// minExclusive >= base.maxExclusive
if ( ((fFacetsDefined & DatatypeValidator.FACET_MINEXCLUSIVE) != 0) ) {
if ( (( numBase.fFacetsDefined & DatatypeValidator.FACET_MINEXCLUSIVE) != 0)){
result= compareValues(fMinExclusive, numBase.fMinExclusive);
if ((numBase.fFlags & DatatypeValidator.FACET_MINEXCLUSIVE) != 0 &&
result != 0) {
throw new InvalidDatatypeFacetException( "minExclusive value = '" + getMinExclusive(false) +
"' must be equal to base.minExclusive value = '" +
getMinExclusive(true) + "' with attribute {fixed} = true" );
}
if (result == -1) {
throw new InvalidDatatypeFacetException( "minExclusive value ='" + getMinExclusive(false) + "' must be >= base.minExclusive value ='" +
getMinExclusive(true) + "'." );
}
}
if ( (( numBase.fFacetsDefined & DatatypeValidator.FACET_MAXINCLUSIVE) != 0) &&
compareValues(fMinExclusive, numBase.fMaxInclusive) == 1 )
throw new InvalidDatatypeFacetException(
"minExclusive value ='" + getMinExclusive(false) + "' must be <= base.maxInclusive value ='" +
getMaxInclusive(true) + "'." );
if ( (( numBase.fFacetsDefined & DatatypeValidator.FACET_MININCLUSIVE) != 0) &&
compareValues(fMinExclusive, numBase.fMinInclusive) == -1 )
throw new InvalidDatatypeFacetException(
"minExclusive value ='" + getMinExclusive(false) + "' must be >= base.minInclusive value ='" +
getMinInclusive(true) + "'." );
if ( (( numBase.fFacetsDefined & DatatypeValidator.FACET_MAXEXCLUSIVE) != 0) &&
compareValues(fMinExclusive, numBase.fMaxExclusive) != -1 )
throw new InvalidDatatypeFacetException( "minExclusive value ='" + getMinExclusive(false) + "' must be < base.maxExclusive value ='" +
getMaxExclusive(true) + "'." );
}
// check 4.3.10.c2 error:
// minInclusive < base.minInclusive
// minInclusive > base.maxInclusive
// minInclusive <= base.minExclusive
// minInclusive >= base.maxExclusive
if ( ((fFacetsDefined & DatatypeValidator.FACET_MININCLUSIVE) != 0) ) {
if ( ((numBase.fFacetsDefined & DatatypeValidator.FACET_MININCLUSIVE) != 0)){
result = compareValues(fMinInclusive, numBase.fMinInclusive);
if ((numBase.fFlags & DatatypeValidator.FACET_MININCLUSIVE) != 0 &&
result != 0) {
throw new InvalidDatatypeFacetException( "minInclusive value = '" + getMinInclusive(false) +
"' must be equal to base.minInclusive value = '" +
getMinInclusive(true) + "' with attribute {fixed} = true" );
}
if (result == -1 ){
throw new InvalidDatatypeFacetException( "minInclusive value ='" + getMinInclusive(false) + "' must be >= base.minInclusive value ='" +
getMinInclusive(true) + "'." );
}
}
if ( (( numBase.fFacetsDefined & DatatypeValidator.FACET_MAXINCLUSIVE) != 0) &&
compareValues(fMinInclusive, numBase.fMaxInclusive) == 1 )
throw new InvalidDatatypeFacetException( "minInclusive value ='" + getMinInclusive(false) + "' must be <= base.maxInclusive value ='" +
getMaxInclusive(true) + "'." );
if ( (( numBase.fFacetsDefined & DatatypeValidator.FACET_MINEXCLUSIVE) != 0) &&
compareValues(fMinInclusive, numBase.fMinExclusive ) != 1 )
throw new InvalidDatatypeFacetException( "minInclusive value ='" + getMinInclusive(false) + "' must be > base.minExclusive value ='" +
getMinExclusive(true) + "'." );
if ( (( numBase.fFacetsDefined & DatatypeValidator.FACET_MAXEXCLUSIVE) != 0) &&
compareValues(fMinInclusive, numBase.fMaxExclusive) != -1 )
throw new InvalidDatatypeFacetException( "minInclusive value ='" + getMinInclusive(false) + "' must be < base.maxExclusive value ='" +
getMaxExclusive(true) + "'." );
}
checkBaseFacetConstraints();
}
// check question error: fractionDigits > base.fractionDigits ???
// check question error: fractionDigits > base.totalDigits ???
// check question error: totalDigits conflicts with bounds ???
// inherit enumeration
if ( (fFacetsDefined & DatatypeValidator.FACET_ENUMERATION) == 0 &&
(numBase.fFacetsDefined & DatatypeValidator.FACET_ENUMERATION) != 0 ) {
fFacetsDefined |= DatatypeValidator.FACET_ENUMERATION;
fEnumeration = numBase.fEnumeration;
}
// inherit maxExclusive
if ( (( numBase.fFacetsDefined & DatatypeValidator.FACET_MAXEXCLUSIVE) != 0) &&
!((fFacetsDefined & DatatypeValidator.FACET_MAXEXCLUSIVE) != 0) && !((fFacetsDefined & DatatypeValidator.FACET_MAXINCLUSIVE) != 0) ) {
fFacetsDefined |= FACET_MAXEXCLUSIVE;
fMaxExclusive = numBase.fMaxExclusive;
}
// inherit maxInclusive
if ( (( numBase.fFacetsDefined & DatatypeValidator.FACET_MAXINCLUSIVE) != 0) &&
!((fFacetsDefined & DatatypeValidator.FACET_MAXEXCLUSIVE) != 0) && !((fFacetsDefined & DatatypeValidator.FACET_MAXINCLUSIVE) != 0) ) {
fFacetsDefined |= FACET_MAXINCLUSIVE;
fMaxInclusive = numBase.fMaxInclusive;
}
// inherit minExclusive
if ( (( numBase.fFacetsDefined & DatatypeValidator.FACET_MINEXCLUSIVE) != 0) &&
!((fFacetsDefined & DatatypeValidator.FACET_MINEXCLUSIVE) != 0) && !((fFacetsDefined & DatatypeValidator.FACET_MININCLUSIVE) != 0) ) {
fFacetsDefined |= FACET_MINEXCLUSIVE;
fMinExclusive = numBase.fMinExclusive;
}
// inherit minExclusive
if ( (( numBase.fFacetsDefined & DatatypeValidator.FACET_MININCLUSIVE) != 0) &&
!((fFacetsDefined & DatatypeValidator.FACET_MINEXCLUSIVE) != 0) && !((fFacetsDefined & DatatypeValidator.FACET_MININCLUSIVE) != 0) ) {
fFacetsDefined |= FACET_MININCLUSIVE;
fMinInclusive = numBase.fMinInclusive;
}
inheritAdditionalFacets();
//inherit fixed values
fFlags |= numBase.fFlags;
// check 4.3.5.c0 must: enumeration values from the value space of base
if ( (fFacetsDefined & DatatypeValidator.FACET_ENUMERATION) != 0 ) {
if ( enumeration != null ) {
try {
setEnumeration(enumeration);
}
catch ( Exception idve ) {
throw new InvalidDatatypeFacetException( idve.getMessage());
}
}
}
}
}//End of Facet setup
}
//
// Compares values in value space of give datatype
//
abstract protected int compareValues (Object value1, Object value2);
//
// set* functions used to set facets values
//
abstract protected void setMaxInclusive (String value);
abstract protected void setMinInclusive (String value);
abstract protected void setMaxExclusive (String value);
abstract protected void setMinExclusive (String value);
abstract protected void setEnumeration (Vector enumeration)
throws InvalidDatatypeValueException;
//
// get* functions used to output error messages
//
abstract protected String getMaxInclusive (boolean isBase);
abstract protected String getMinInclusive (boolean isBase);
abstract protected String getMaxExclusive (boolean isBase);
abstract protected String getMinExclusive (boolean isBase);
//
// date/times need to initialize structure objects
//
protected void initializeValues() {}
//
// decimal has fractionDigits and totalDigits facets
// all other datatypes will throw InvalidDatatypeFacetException
//
abstract protected void assignAdditionalFacets(String key, Hashtable facets)
throws InvalidDatatypeFacetException;
//
// decimal needs to inherit totalDigits and fractionDigits
//
protected void inheritAdditionalFacets() {}
//
// decimal needs to check constraints on totalDigits and fractionDigits
// check is done against fBaseValidator
//
protected void checkBaseFacetConstraints() throws InvalidDatatypeFacetException {}
//
// decimal needs to check constraints on totalDigits and fractionDigits
//
protected void checkFacetConstraints() throws InvalidDatatypeFacetException {}
}
| true | true | public AbstractNumericFacetValidator ( DatatypeValidator base,
Hashtable facets,
boolean derivedByList) throws InvalidDatatypeFacetException {
fBaseValidator = base;
// list types are handled by ListDatatypeValidator, we do nothing here.
if ( derivedByList )
return;
initializeValues();
// Set Facets if any defined
if ( facets != null ) {
Vector enumeration = null;
for ( Enumeration e = facets.keys(); e.hasMoreElements(); ) {
String key = (String) e.nextElement();
String value = null;
try {
if ( key.equals(SchemaSymbols.ELT_PATTERN) ) {
fFacetsDefined |= DatatypeValidator.FACET_PATTERN;
fPattern = (String) facets.get(key);
if ( fPattern != null )
fRegex = new RegularExpression(fPattern, "X" );
}
else if ( key.equals(SchemaSymbols.ELT_ENUMERATION) ) {
enumeration = (Vector)facets.get(key);
fFacetsDefined |= DatatypeValidator.FACET_ENUMERATION;
}
else if ( key.equals(SchemaSymbols.ELT_MAXINCLUSIVE) ) {
value = ((String) facets.get(key ));
fFacetsDefined |= DatatypeValidator.FACET_MAXINCLUSIVE;
setMaxInclusive(value);
}
else if ( key.equals(SchemaSymbols.ELT_MAXEXCLUSIVE) ) {
value = ((String) facets.get(key ));
fFacetsDefined |= DatatypeValidator.FACET_MAXEXCLUSIVE;
setMaxExclusive(value);
}
else if ( key.equals(SchemaSymbols.ELT_MININCLUSIVE) ) {
value = ((String) facets.get(key ));
fFacetsDefined |= DatatypeValidator.FACET_MININCLUSIVE;
setMinInclusive(value);
}
else if ( key.equals(SchemaSymbols.ELT_MINEXCLUSIVE) ) {
value = ((String) facets.get(key ));
fFacetsDefined |= DatatypeValidator.FACET_MINEXCLUSIVE;
setMinExclusive(value);
}
else if (key.equals(DatatypeValidator.FACET_FIXED)) {// fixed flags
fFlags = ((Short)facets.get(key)).shortValue();
}
else {
assignAdditionalFacets(key, facets);
}
}
catch ( Exception ex ) {
throw new InvalidDatatypeFacetException( getErrorString( DatatypeMessageProvider.ILLEGAL_FACET_VALUE,
DatatypeMessageProvider.MSG_NONE, new Object [] { value, key}));
}
}
if ( fFacetsDefined != 0 ) {
// check 4.3.8.c1 error: maxInclusive + maxExclusive
if ( ((fFacetsDefined & DatatypeValidator.FACET_MAXEXCLUSIVE) != 0) &&
((fFacetsDefined & DatatypeValidator.FACET_MAXINCLUSIVE) != 0) ) {
throw new InvalidDatatypeFacetException( "It is an error for both maxInclusive and maxExclusive to be specified for the same datatype." );
}
// check 4.3.9.c1 error: minInclusive + minExclusive
if ( ((fFacetsDefined & DatatypeValidator.FACET_MINEXCLUSIVE) != 0) &&
((fFacetsDefined & DatatypeValidator.FACET_MININCLUSIVE) != 0) ) {
throw new InvalidDatatypeFacetException( "It is an error for both minInclusive and minExclusive to be specified for the same datatype." );
}
// check 4.3.7.c1 must: minInclusive <= maxInclusive
if ( ((fFacetsDefined & DatatypeValidator.FACET_MAXINCLUSIVE) != 0) &&
((fFacetsDefined & DatatypeValidator.FACET_MININCLUSIVE) != 0) ) {
if ( compareValues(fMinInclusive, fMaxInclusive) == 1 )
throw new InvalidDatatypeFacetException( "minInclusive value ='" + getMinInclusive(false) + "'must be <= maxInclusive value ='" +
getMaxInclusive(false) + "'. " );
}
// check 4.3.8.c2 must: minExclusive <= maxExclusive ??? minExclusive < maxExclusive
if ( ((fFacetsDefined & DatatypeValidator.FACET_MAXEXCLUSIVE) != 0) && ((fFacetsDefined & DatatypeValidator.FACET_MINEXCLUSIVE) != 0) ) {
if ( compareValues(fMinExclusive, fMaxExclusive) == 1 )
throw new InvalidDatatypeFacetException( "minExclusive value ='" + getMinExclusive(false) + "'must be <= maxExclusive value ='" +
getMaxExclusive(false) + "'. " );
}
// check 4.3.9.c2 must: minExclusive < maxInclusive
if ( ((fFacetsDefined & DatatypeValidator.FACET_MAXINCLUSIVE) != 0) && ((fFacetsDefined & DatatypeValidator.FACET_MINEXCLUSIVE) != 0) ) {
if ( compareValues(fMinExclusive, fMaxInclusive) != -1 )
throw new InvalidDatatypeFacetException( "minExclusive value ='" + getMinExclusive(false) + "'must be > maxInclusive value ='" +
getMaxInclusive(false) + "'. " );
}
// check 4.3.10.c1 must: minInclusive < maxExclusive
if ( ((fFacetsDefined & DatatypeValidator.FACET_MAXEXCLUSIVE) != 0) && ((fFacetsDefined & DatatypeValidator.FACET_MININCLUSIVE) != 0) ) {
if ( compareValues(fMinInclusive, fMaxExclusive) != -1 )
throw new InvalidDatatypeFacetException( "minInclusive value ='" + getMinInclusive(false) + "'must be < maxExclusive value ='" +
getMaxExclusive(false) + "'. " );
}
checkFacetConstraints();
}
if ( base != null ) {
AbstractNumericFacetValidator numBase = (AbstractNumericFacetValidator)base;
if ( fFacetsDefined != 0 ) {
// check 4.3.7.c2 error:
// maxInclusive > base.maxInclusive
// maxInclusive >= base.maxExclusive
// maxInclusive < base.minInclusive
// maxInclusive <= base.minExclusive
int result;
if ( ((fFacetsDefined & DatatypeValidator.FACET_MAXINCLUSIVE) != 0) ) {
if ( ((numBase.fFacetsDefined & DatatypeValidator.FACET_MAXINCLUSIVE) != 0)){
result = compareValues(fMaxInclusive, numBase.fMaxInclusive);
if ((numBase.fFlags & DatatypeValidator.FACET_MAXINCLUSIVE) != 0 &&
result != 0) {
throw new InvalidDatatypeFacetException( "maxInclusive value = '" + getMaxInclusive(false) +
"' must be equal to base.maxInclusive value = '" +
getMaxInclusive(true) + "' with attribute {fixed} = true" );
}
if ( result == 1 ){
throw new InvalidDatatypeFacetException( "maxInclusive value ='" + getMaxInclusive(false) + "' must be <= base.maxInclusive value ='" +
getMaxInclusive(true) + "'." );
}
}
if ( ((numBase.fFacetsDefined & DatatypeValidator.FACET_MAXEXCLUSIVE) != 0) &&
compareValues(fMaxInclusive, numBase.fMaxExclusive) != -1 )
throw new InvalidDatatypeFacetException(
"maxInclusive value ='" + getMaxInclusive(false) + "' must be < base.maxExclusive value ='" +
getMaxExclusive(true) + "'." );
if ( (( numBase.fFacetsDefined & DatatypeValidator.FACET_MININCLUSIVE) != 0) &&
compareValues(fMaxInclusive, numBase.fMinInclusive) == -1 )
throw new InvalidDatatypeFacetException( "maxInclusive value ='" + getMaxInclusive(false) + "' must be >= base.minInclusive value ='" +
getMinInclusive(true) + "'." );
if ( (( numBase.fFacetsDefined & DatatypeValidator.FACET_MINEXCLUSIVE) != 0) &&
compareValues(fMaxInclusive, numBase.fMinExclusive ) != 1 )
throw new InvalidDatatypeFacetException(
"maxInclusive value ='" + getMaxInclusive(false) + "' must be > base.minExclusive value ='" +
getMinExclusive(true) + "'." );
}
// check 4.3.8.c3 error:
// maxExclusive > base.maxExclusive
// maxExclusive > base.maxInclusive
// maxExclusive <= base.minInclusive
// maxExclusive <= base.minExclusive
if ( ((fFacetsDefined & DatatypeValidator.FACET_MAXEXCLUSIVE) != 0) ) {
if ( (( numBase.fFacetsDefined & DatatypeValidator.FACET_MAXEXCLUSIVE) != 0)){
result= compareValues(fMaxExclusive, numBase.fMaxExclusive);
if ((numBase.fFlags & DatatypeValidator.FACET_MAXEXCLUSIVE) != 0 &&
result != 0) {
throw new InvalidDatatypeFacetException( "maxExclusive value = '" + getMaxExclusive(false) +
"' must be equal to base.maxExclusive value = '" +
getMaxExclusive(true) + "' with attribute {fixed} = true" );
}
if (result == 1) {
throw new InvalidDatatypeFacetException( "maxExclusive value ='" + getMaxExclusive(false) + "' must be < base.maxExclusive value ='" +
getMaxExclusive(true) + "'." );
}
}
if ( (( numBase.fFacetsDefined & DatatypeValidator.FACET_MAXINCLUSIVE) != 0) &&
compareValues(fMaxExclusive, numBase.fMaxInclusive) == 1 )
throw new InvalidDatatypeFacetException( "maxExclusive value ='" + getMaxExclusive(false) + "' must be <= base.maxInclusive value ='" +
getMaxInclusive(true) + "'." );
if ( (( numBase.fFacetsDefined & DatatypeValidator.FACET_MINEXCLUSIVE) != 0) &&
compareValues(fMaxExclusive, numBase.fMinExclusive ) != 1 )
throw new InvalidDatatypeFacetException( "maxExclusive value ='" + getMaxExclusive(false) + "' must be > base.minExclusive value ='" +
getMinExclusive(true) + "'." );
if ( (( numBase.fFacetsDefined & DatatypeValidator.FACET_MININCLUSIVE) != 0) &&
compareValues(fMaxExclusive, numBase.fMinInclusive) != 1 )
throw new InvalidDatatypeFacetException( "maxExclusive value ='" + getMaxExclusive(false) + "' must be > base.minInclusive value ='" +
getMinInclusive(true) + "'." );
}
// check 4.3.9.c3 error:
// minExclusive < base.minExclusive
// minExclusive > base.maxInclusive ??? minExclusive >= base.maxInclusive
// minExclusive < base.minInclusive
// minExclusive >= base.maxExclusive
if ( ((fFacetsDefined & DatatypeValidator.FACET_MINEXCLUSIVE) != 0) ) {
if ( (( numBase.fFacetsDefined & DatatypeValidator.FACET_MINEXCLUSIVE) != 0)){
result= compareValues(fMinExclusive, numBase.fMinExclusive);
if ((numBase.fFlags & DatatypeValidator.FACET_MINEXCLUSIVE) != 0 &&
result != 0) {
throw new InvalidDatatypeFacetException( "minExclusive value = '" + getMinExclusive(false) +
"' must be equal to base.minExclusive value = '" +
getMinExclusive(true) + "' with attribute {fixed} = true" );
}
if (result == -1) {
throw new InvalidDatatypeFacetException( "minExclusive value ='" + getMinExclusive(false) + "' must be >= base.minExclusive value ='" +
getMinExclusive(true) + "'." );
}
}
if ( (( numBase.fFacetsDefined & DatatypeValidator.FACET_MAXINCLUSIVE) != 0) &&
compareValues(fMinExclusive, numBase.fMaxInclusive) == 1 )
throw new InvalidDatatypeFacetException(
"minExclusive value ='" + getMinExclusive(false) + "' must be <= base.maxInclusive value ='" +
getMaxInclusive(true) + "'." );
if ( (( numBase.fFacetsDefined & DatatypeValidator.FACET_MININCLUSIVE) != 0) &&
compareValues(fMinExclusive, numBase.fMinInclusive) == -1 )
throw new InvalidDatatypeFacetException(
"minExclusive value ='" + getMinExclusive(false) + "' must be >= base.minInclusive value ='" +
getMinInclusive(true) + "'." );
if ( (( numBase.fFacetsDefined & DatatypeValidator.FACET_MAXEXCLUSIVE) != 0) &&
compareValues(fMinExclusive, numBase.fMaxExclusive) != -1 )
throw new InvalidDatatypeFacetException( "minExclusive value ='" + getMinExclusive(false) + "' must be < base.maxExclusive value ='" +
getMaxExclusive(true) + "'." );
}
// check 4.3.10.c2 error:
// minInclusive < base.minInclusive
// minInclusive > base.maxInclusive
// minInclusive <= base.minExclusive
// minInclusive >= base.maxExclusive
if ( ((fFacetsDefined & DatatypeValidator.FACET_MININCLUSIVE) != 0) ) {
if ( ((numBase.fFacetsDefined & DatatypeValidator.FACET_MININCLUSIVE) != 0)){
result = compareValues(fMinInclusive, numBase.fMinInclusive);
if ((numBase.fFlags & DatatypeValidator.FACET_MININCLUSIVE) != 0 &&
result != 0) {
throw new InvalidDatatypeFacetException( "minInclusive value = '" + getMinInclusive(false) +
"' must be equal to base.minInclusive value = '" +
getMinInclusive(true) + "' with attribute {fixed} = true" );
}
if (result == -1 ){
throw new InvalidDatatypeFacetException( "minInclusive value ='" + getMinInclusive(false) + "' must be >= base.minInclusive value ='" +
getMinInclusive(true) + "'." );
}
}
if ( (( numBase.fFacetsDefined & DatatypeValidator.FACET_MAXINCLUSIVE) != 0) &&
compareValues(fMinInclusive, numBase.fMaxInclusive) == 1 )
throw new InvalidDatatypeFacetException( "minInclusive value ='" + getMinInclusive(false) + "' must be <= base.maxInclusive value ='" +
getMaxInclusive(true) + "'." );
if ( (( numBase.fFacetsDefined & DatatypeValidator.FACET_MINEXCLUSIVE) != 0) &&
compareValues(fMinInclusive, numBase.fMinExclusive ) != 1 )
throw new InvalidDatatypeFacetException( "minInclusive value ='" + getMinInclusive(false) + "' must be > base.minExclusive value ='" +
getMinExclusive(true) + "'." );
if ( (( numBase.fFacetsDefined & DatatypeValidator.FACET_MAXEXCLUSIVE) != 0) &&
compareValues(fMinInclusive, numBase.fMaxExclusive) != -1 )
throw new InvalidDatatypeFacetException( "minInclusive value ='" + getMinInclusive(false) + "' must be < base.maxExclusive value ='" +
getMaxExclusive(true) + "'." );
}
checkBaseFacetConstraints();
}
// check question error: fractionDigits > base.fractionDigits ???
// check question error: fractionDigits > base.totalDigits ???
// check question error: totalDigits conflicts with bounds ???
// inherit enumeration
if ( (fFacetsDefined & DatatypeValidator.FACET_ENUMERATION) == 0 &&
(numBase.fFacetsDefined & DatatypeValidator.FACET_ENUMERATION) != 0 ) {
fFacetsDefined |= DatatypeValidator.FACET_ENUMERATION;
fEnumeration = numBase.fEnumeration;
}
// inherit maxExclusive
if ( (( numBase.fFacetsDefined & DatatypeValidator.FACET_MAXEXCLUSIVE) != 0) &&
!((fFacetsDefined & DatatypeValidator.FACET_MAXEXCLUSIVE) != 0) && !((fFacetsDefined & DatatypeValidator.FACET_MAXINCLUSIVE) != 0) ) {
fFacetsDefined |= FACET_MAXEXCLUSIVE;
fMaxExclusive = numBase.fMaxExclusive;
}
// inherit maxInclusive
if ( (( numBase.fFacetsDefined & DatatypeValidator.FACET_MAXINCLUSIVE) != 0) &&
!((fFacetsDefined & DatatypeValidator.FACET_MAXEXCLUSIVE) != 0) && !((fFacetsDefined & DatatypeValidator.FACET_MAXINCLUSIVE) != 0) ) {
fFacetsDefined |= FACET_MAXINCLUSIVE;
fMaxInclusive = numBase.fMaxInclusive;
}
// inherit minExclusive
if ( (( numBase.fFacetsDefined & DatatypeValidator.FACET_MINEXCLUSIVE) != 0) &&
!((fFacetsDefined & DatatypeValidator.FACET_MINEXCLUSIVE) != 0) && !((fFacetsDefined & DatatypeValidator.FACET_MININCLUSIVE) != 0) ) {
fFacetsDefined |= FACET_MINEXCLUSIVE;
fMinExclusive = numBase.fMinExclusive;
}
// inherit minExclusive
if ( (( numBase.fFacetsDefined & DatatypeValidator.FACET_MININCLUSIVE) != 0) &&
!((fFacetsDefined & DatatypeValidator.FACET_MINEXCLUSIVE) != 0) && !((fFacetsDefined & DatatypeValidator.FACET_MININCLUSIVE) != 0) ) {
fFacetsDefined |= FACET_MININCLUSIVE;
fMinInclusive = numBase.fMinInclusive;
}
inheritAdditionalFacets();
//inherit fixed values
fFlags |= numBase.fFlags;
// check 4.3.5.c0 must: enumeration values from the value space of base
if ( (fFacetsDefined & DatatypeValidator.FACET_ENUMERATION) != 0 ) {
if ( enumeration != null ) {
try {
setEnumeration(enumeration);
}
catch ( Exception idve ) {
throw new InvalidDatatypeFacetException( idve.getMessage());
}
}
}
}
}//End of Facet setup
}
| public AbstractNumericFacetValidator ( DatatypeValidator base,
Hashtable facets,
boolean derivedByList) throws InvalidDatatypeFacetException {
fBaseValidator = base;
// list types are handled by ListDatatypeValidator, we do nothing here.
if ( derivedByList )
return;
initializeValues();
// Set Facets if any defined
if ( facets != null ) {
Vector enumeration = null;
for ( Enumeration e = facets.keys(); e.hasMoreElements(); ) {
String key = (String) e.nextElement();
String value = null;
try {
if ( key.equals(SchemaSymbols.ELT_PATTERN) ) {
fFacetsDefined |= DatatypeValidator.FACET_PATTERN;
fPattern = (String) facets.get(key);
if ( fPattern != null )
fRegex = new RegularExpression(fPattern, "X" );
}
else if ( key.equals(SchemaSymbols.ELT_ENUMERATION) ) {
enumeration = (Vector)facets.get(key);
fFacetsDefined |= DatatypeValidator.FACET_ENUMERATION;
}
else if ( key.equals(SchemaSymbols.ELT_MAXINCLUSIVE) ) {
value = ((String) facets.get(key ));
fFacetsDefined |= DatatypeValidator.FACET_MAXINCLUSIVE;
setMaxInclusive(value);
}
else if ( key.equals(SchemaSymbols.ELT_MAXEXCLUSIVE) ) {
value = ((String) facets.get(key ));
fFacetsDefined |= DatatypeValidator.FACET_MAXEXCLUSIVE;
setMaxExclusive(value);
}
else if ( key.equals(SchemaSymbols.ELT_MININCLUSIVE) ) {
value = ((String) facets.get(key ));
fFacetsDefined |= DatatypeValidator.FACET_MININCLUSIVE;
setMinInclusive(value);
}
else if ( key.equals(SchemaSymbols.ELT_MINEXCLUSIVE) ) {
value = ((String) facets.get(key ));
fFacetsDefined |= DatatypeValidator.FACET_MINEXCLUSIVE;
setMinExclusive(value);
}
else if (key.equals(DatatypeValidator.FACET_FIXED)) {// fixed flags
fFlags = ((Short)facets.get(key)).shortValue();
}
else {
assignAdditionalFacets(key, facets);
}
}
catch ( Exception ex ) {
if (value == null) {
//invalid facet error
throw new InvalidDatatypeFacetException( ex.getMessage());
}
else{
throw new InvalidDatatypeFacetException( (getErrorString (DatatypeMessageProvider.ILLEGAL_FACET_VALUE,
DatatypeMessageProvider.MSG_NONE, new Object [] { value, key})));
}
}
}
if ( fFacetsDefined != 0 ) {
// check 4.3.8.c1 error: maxInclusive + maxExclusive
if ( ((fFacetsDefined & DatatypeValidator.FACET_MAXEXCLUSIVE) != 0) &&
((fFacetsDefined & DatatypeValidator.FACET_MAXINCLUSIVE) != 0) ) {
throw new InvalidDatatypeFacetException( "It is an error for both maxInclusive and maxExclusive to be specified for the same datatype." );
}
// check 4.3.9.c1 error: minInclusive + minExclusive
if ( ((fFacetsDefined & DatatypeValidator.FACET_MINEXCLUSIVE) != 0) &&
((fFacetsDefined & DatatypeValidator.FACET_MININCLUSIVE) != 0) ) {
throw new InvalidDatatypeFacetException( "It is an error for both minInclusive and minExclusive to be specified for the same datatype." );
}
// check 4.3.7.c1 must: minInclusive <= maxInclusive
if ( ((fFacetsDefined & DatatypeValidator.FACET_MAXINCLUSIVE) != 0) &&
((fFacetsDefined & DatatypeValidator.FACET_MININCLUSIVE) != 0) ) {
if ( compareValues(fMinInclusive, fMaxInclusive) == 1 )
throw new InvalidDatatypeFacetException( "minInclusive value ='" + getMinInclusive(false) + "'must be <= maxInclusive value ='" +
getMaxInclusive(false) + "'. " );
}
// check 4.3.8.c2 must: minExclusive <= maxExclusive ??? minExclusive < maxExclusive
if ( ((fFacetsDefined & DatatypeValidator.FACET_MAXEXCLUSIVE) != 0) && ((fFacetsDefined & DatatypeValidator.FACET_MINEXCLUSIVE) != 0) ) {
if ( compareValues(fMinExclusive, fMaxExclusive) == 1 )
throw new InvalidDatatypeFacetException( "minExclusive value ='" + getMinExclusive(false) + "'must be <= maxExclusive value ='" +
getMaxExclusive(false) + "'. " );
}
// check 4.3.9.c2 must: minExclusive < maxInclusive
if ( ((fFacetsDefined & DatatypeValidator.FACET_MAXINCLUSIVE) != 0) && ((fFacetsDefined & DatatypeValidator.FACET_MINEXCLUSIVE) != 0) ) {
if ( compareValues(fMinExclusive, fMaxInclusive) != -1 )
throw new InvalidDatatypeFacetException( "minExclusive value ='" + getMinExclusive(false) + "'must be > maxInclusive value ='" +
getMaxInclusive(false) + "'. " );
}
// check 4.3.10.c1 must: minInclusive < maxExclusive
if ( ((fFacetsDefined & DatatypeValidator.FACET_MAXEXCLUSIVE) != 0) && ((fFacetsDefined & DatatypeValidator.FACET_MININCLUSIVE) != 0) ) {
if ( compareValues(fMinInclusive, fMaxExclusive) != -1 )
throw new InvalidDatatypeFacetException( "minInclusive value ='" + getMinInclusive(false) + "'must be < maxExclusive value ='" +
getMaxExclusive(false) + "'. " );
}
checkFacetConstraints();
}
if ( base != null ) {
AbstractNumericFacetValidator numBase = (AbstractNumericFacetValidator)base;
if ( fFacetsDefined != 0 ) {
// check 4.3.7.c2 error:
// maxInclusive > base.maxInclusive
// maxInclusive >= base.maxExclusive
// maxInclusive < base.minInclusive
// maxInclusive <= base.minExclusive
int result;
if ( ((fFacetsDefined & DatatypeValidator.FACET_MAXINCLUSIVE) != 0) ) {
if ( ((numBase.fFacetsDefined & DatatypeValidator.FACET_MAXINCLUSIVE) != 0)){
result = compareValues(fMaxInclusive, numBase.fMaxInclusive);
if ((numBase.fFlags & DatatypeValidator.FACET_MAXINCLUSIVE) != 0 &&
result != 0) {
throw new InvalidDatatypeFacetException( "maxInclusive value = '" + getMaxInclusive(false) +
"' must be equal to base.maxInclusive value = '" +
getMaxInclusive(true) + "' with attribute {fixed} = true" );
}
if ( result == 1 ){
throw new InvalidDatatypeFacetException( "maxInclusive value ='" + getMaxInclusive(false) + "' must be <= base.maxInclusive value ='" +
getMaxInclusive(true) + "'." );
}
}
if ( ((numBase.fFacetsDefined & DatatypeValidator.FACET_MAXEXCLUSIVE) != 0) &&
compareValues(fMaxInclusive, numBase.fMaxExclusive) != -1 )
throw new InvalidDatatypeFacetException(
"maxInclusive value ='" + getMaxInclusive(false) + "' must be < base.maxExclusive value ='" +
getMaxExclusive(true) + "'." );
if ( (( numBase.fFacetsDefined & DatatypeValidator.FACET_MININCLUSIVE) != 0) &&
compareValues(fMaxInclusive, numBase.fMinInclusive) == -1 )
throw new InvalidDatatypeFacetException( "maxInclusive value ='" + getMaxInclusive(false) + "' must be >= base.minInclusive value ='" +
getMinInclusive(true) + "'." );
if ( (( numBase.fFacetsDefined & DatatypeValidator.FACET_MINEXCLUSIVE) != 0) &&
compareValues(fMaxInclusive, numBase.fMinExclusive ) != 1 )
throw new InvalidDatatypeFacetException(
"maxInclusive value ='" + getMaxInclusive(false) + "' must be > base.minExclusive value ='" +
getMinExclusive(true) + "'." );
}
// check 4.3.8.c3 error:
// maxExclusive > base.maxExclusive
// maxExclusive > base.maxInclusive
// maxExclusive <= base.minInclusive
// maxExclusive <= base.minExclusive
if ( ((fFacetsDefined & DatatypeValidator.FACET_MAXEXCLUSIVE) != 0) ) {
if ( (( numBase.fFacetsDefined & DatatypeValidator.FACET_MAXEXCLUSIVE) != 0)){
result= compareValues(fMaxExclusive, numBase.fMaxExclusive);
if ((numBase.fFlags & DatatypeValidator.FACET_MAXEXCLUSIVE) != 0 &&
result != 0) {
throw new InvalidDatatypeFacetException( "maxExclusive value = '" + getMaxExclusive(false) +
"' must be equal to base.maxExclusive value = '" +
getMaxExclusive(true) + "' with attribute {fixed} = true" );
}
if (result == 1) {
throw new InvalidDatatypeFacetException( "maxExclusive value ='" + getMaxExclusive(false) + "' must be < base.maxExclusive value ='" +
getMaxExclusive(true) + "'." );
}
}
if ( (( numBase.fFacetsDefined & DatatypeValidator.FACET_MAXINCLUSIVE) != 0) &&
compareValues(fMaxExclusive, numBase.fMaxInclusive) == 1 )
throw new InvalidDatatypeFacetException( "maxExclusive value ='" + getMaxExclusive(false) + "' must be <= base.maxInclusive value ='" +
getMaxInclusive(true) + "'." );
if ( (( numBase.fFacetsDefined & DatatypeValidator.FACET_MINEXCLUSIVE) != 0) &&
compareValues(fMaxExclusive, numBase.fMinExclusive ) != 1 )
throw new InvalidDatatypeFacetException( "maxExclusive value ='" + getMaxExclusive(false) + "' must be > base.minExclusive value ='" +
getMinExclusive(true) + "'." );
if ( (( numBase.fFacetsDefined & DatatypeValidator.FACET_MININCLUSIVE) != 0) &&
compareValues(fMaxExclusive, numBase.fMinInclusive) != 1 )
throw new InvalidDatatypeFacetException( "maxExclusive value ='" + getMaxExclusive(false) + "' must be > base.minInclusive value ='" +
getMinInclusive(true) + "'." );
}
// check 4.3.9.c3 error:
// minExclusive < base.minExclusive
// minExclusive > base.maxInclusive ??? minExclusive >= base.maxInclusive
// minExclusive < base.minInclusive
// minExclusive >= base.maxExclusive
if ( ((fFacetsDefined & DatatypeValidator.FACET_MINEXCLUSIVE) != 0) ) {
if ( (( numBase.fFacetsDefined & DatatypeValidator.FACET_MINEXCLUSIVE) != 0)){
result= compareValues(fMinExclusive, numBase.fMinExclusive);
if ((numBase.fFlags & DatatypeValidator.FACET_MINEXCLUSIVE) != 0 &&
result != 0) {
throw new InvalidDatatypeFacetException( "minExclusive value = '" + getMinExclusive(false) +
"' must be equal to base.minExclusive value = '" +
getMinExclusive(true) + "' with attribute {fixed} = true" );
}
if (result == -1) {
throw new InvalidDatatypeFacetException( "minExclusive value ='" + getMinExclusive(false) + "' must be >= base.minExclusive value ='" +
getMinExclusive(true) + "'." );
}
}
if ( (( numBase.fFacetsDefined & DatatypeValidator.FACET_MAXINCLUSIVE) != 0) &&
compareValues(fMinExclusive, numBase.fMaxInclusive) == 1 )
throw new InvalidDatatypeFacetException(
"minExclusive value ='" + getMinExclusive(false) + "' must be <= base.maxInclusive value ='" +
getMaxInclusive(true) + "'." );
if ( (( numBase.fFacetsDefined & DatatypeValidator.FACET_MININCLUSIVE) != 0) &&
compareValues(fMinExclusive, numBase.fMinInclusive) == -1 )
throw new InvalidDatatypeFacetException(
"minExclusive value ='" + getMinExclusive(false) + "' must be >= base.minInclusive value ='" +
getMinInclusive(true) + "'." );
if ( (( numBase.fFacetsDefined & DatatypeValidator.FACET_MAXEXCLUSIVE) != 0) &&
compareValues(fMinExclusive, numBase.fMaxExclusive) != -1 )
throw new InvalidDatatypeFacetException( "minExclusive value ='" + getMinExclusive(false) + "' must be < base.maxExclusive value ='" +
getMaxExclusive(true) + "'." );
}
// check 4.3.10.c2 error:
// minInclusive < base.minInclusive
// minInclusive > base.maxInclusive
// minInclusive <= base.minExclusive
// minInclusive >= base.maxExclusive
if ( ((fFacetsDefined & DatatypeValidator.FACET_MININCLUSIVE) != 0) ) {
if ( ((numBase.fFacetsDefined & DatatypeValidator.FACET_MININCLUSIVE) != 0)){
result = compareValues(fMinInclusive, numBase.fMinInclusive);
if ((numBase.fFlags & DatatypeValidator.FACET_MININCLUSIVE) != 0 &&
result != 0) {
throw new InvalidDatatypeFacetException( "minInclusive value = '" + getMinInclusive(false) +
"' must be equal to base.minInclusive value = '" +
getMinInclusive(true) + "' with attribute {fixed} = true" );
}
if (result == -1 ){
throw new InvalidDatatypeFacetException( "minInclusive value ='" + getMinInclusive(false) + "' must be >= base.minInclusive value ='" +
getMinInclusive(true) + "'." );
}
}
if ( (( numBase.fFacetsDefined & DatatypeValidator.FACET_MAXINCLUSIVE) != 0) &&
compareValues(fMinInclusive, numBase.fMaxInclusive) == 1 )
throw new InvalidDatatypeFacetException( "minInclusive value ='" + getMinInclusive(false) + "' must be <= base.maxInclusive value ='" +
getMaxInclusive(true) + "'." );
if ( (( numBase.fFacetsDefined & DatatypeValidator.FACET_MINEXCLUSIVE) != 0) &&
compareValues(fMinInclusive, numBase.fMinExclusive ) != 1 )
throw new InvalidDatatypeFacetException( "minInclusive value ='" + getMinInclusive(false) + "' must be > base.minExclusive value ='" +
getMinExclusive(true) + "'." );
if ( (( numBase.fFacetsDefined & DatatypeValidator.FACET_MAXEXCLUSIVE) != 0) &&
compareValues(fMinInclusive, numBase.fMaxExclusive) != -1 )
throw new InvalidDatatypeFacetException( "minInclusive value ='" + getMinInclusive(false) + "' must be < base.maxExclusive value ='" +
getMaxExclusive(true) + "'." );
}
checkBaseFacetConstraints();
}
// check question error: fractionDigits > base.fractionDigits ???
// check question error: fractionDigits > base.totalDigits ???
// check question error: totalDigits conflicts with bounds ???
// inherit enumeration
if ( (fFacetsDefined & DatatypeValidator.FACET_ENUMERATION) == 0 &&
(numBase.fFacetsDefined & DatatypeValidator.FACET_ENUMERATION) != 0 ) {
fFacetsDefined |= DatatypeValidator.FACET_ENUMERATION;
fEnumeration = numBase.fEnumeration;
}
// inherit maxExclusive
if ( (( numBase.fFacetsDefined & DatatypeValidator.FACET_MAXEXCLUSIVE) != 0) &&
!((fFacetsDefined & DatatypeValidator.FACET_MAXEXCLUSIVE) != 0) && !((fFacetsDefined & DatatypeValidator.FACET_MAXINCLUSIVE) != 0) ) {
fFacetsDefined |= FACET_MAXEXCLUSIVE;
fMaxExclusive = numBase.fMaxExclusive;
}
// inherit maxInclusive
if ( (( numBase.fFacetsDefined & DatatypeValidator.FACET_MAXINCLUSIVE) != 0) &&
!((fFacetsDefined & DatatypeValidator.FACET_MAXEXCLUSIVE) != 0) && !((fFacetsDefined & DatatypeValidator.FACET_MAXINCLUSIVE) != 0) ) {
fFacetsDefined |= FACET_MAXINCLUSIVE;
fMaxInclusive = numBase.fMaxInclusive;
}
// inherit minExclusive
if ( (( numBase.fFacetsDefined & DatatypeValidator.FACET_MINEXCLUSIVE) != 0) &&
!((fFacetsDefined & DatatypeValidator.FACET_MINEXCLUSIVE) != 0) && !((fFacetsDefined & DatatypeValidator.FACET_MININCLUSIVE) != 0) ) {
fFacetsDefined |= FACET_MINEXCLUSIVE;
fMinExclusive = numBase.fMinExclusive;
}
// inherit minExclusive
if ( (( numBase.fFacetsDefined & DatatypeValidator.FACET_MININCLUSIVE) != 0) &&
!((fFacetsDefined & DatatypeValidator.FACET_MINEXCLUSIVE) != 0) && !((fFacetsDefined & DatatypeValidator.FACET_MININCLUSIVE) != 0) ) {
fFacetsDefined |= FACET_MININCLUSIVE;
fMinInclusive = numBase.fMinInclusive;
}
inheritAdditionalFacets();
//inherit fixed values
fFlags |= numBase.fFlags;
// check 4.3.5.c0 must: enumeration values from the value space of base
if ( (fFacetsDefined & DatatypeValidator.FACET_ENUMERATION) != 0 ) {
if ( enumeration != null ) {
try {
setEnumeration(enumeration);
}
catch ( Exception idve ) {
throw new InvalidDatatypeFacetException( idve.getMessage());
}
}
}
}
}//End of Facet setup
}
|
diff --git a/src/main/java/com/joelhockey/jsunit/JSUnitTask.java b/src/main/java/com/joelhockey/jsunit/JSUnitTask.java
index 5bfa667..69cfa9e 100755
--- a/src/main/java/com/joelhockey/jsunit/JSUnitTask.java
+++ b/src/main/java/com/joelhockey/jsunit/JSUnitTask.java
@@ -1,94 +1,92 @@
/*
* The MIT Licence
*
* Copyright 2010 Joel Hockey ([email protected]). All rights reserved.
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package com.joelhockey.jsunit;
import java.util.ArrayList;
import java.util.List;
import java.util.Properties;
import java.util.regex.Pattern;
import org.apache.tools.ant.BuildException;
import org.apache.tools.ant.taskdefs.Java;
import org.apache.tools.ant.types.FileSet;
/**
* Ant 'jsunit' task for JSUnit extends from {@link Java}.
* Supports similar format to 'junit' task, including
* nested 'batchtest' elements and forking (fork once is default).
* Always does plain and xml reports.
* @author Joel Hockey
*/
public class JSUnitTask extends Java {
public static class BatchTest {
public String todir = "target/surefire-reports";
public List<FileSet> fileSets = new ArrayList<FileSet>();
public void addFileSet(FileSet fileSet) {
fileSets.add(fileSet);
}
public void setTodir(String todir) { this.todir = todir; }
}
private List<BatchTest> batchTests = new ArrayList<BatchTest>();
public JSUnitTask() {
setClassname(JSUnit.class.getName());
setFork(true);
setFailonerror(true);
}
public void addBatchTest(BatchTest batchTest) { batchTests.add(batchTest); }
public void execute() throws BuildException {
- // find all files that end with '.js'
// if -Dtest=? set, then filter based on it
- String filter = ".js";
String test = getProject().getProperty("test");
if (test != null && !test.endsWith(".js")) {
- filter = test + ".js";
+ test += ".js";
}
String todir = "";
String basedir = "";
for (BatchTest batchTest : batchTests) {
todir = setArgIfDifferent("-todir", todir, batchTest.todir);
for (FileSet fs : batchTest.fileSets) {
for (String file : fs.getDirectoryScanner(getProject()).getIncludedFiles()) {
basedir = setArgIfDifferent("-basedir", basedir, fs.getDir(getProject()).getAbsolutePath());
- if (file.endsWith(filter)) {
+ if (test == null || file.equals(test)) {
createArg().setValue(file);
}
}
}
}
super.execute();
}
private String setArgIfDifferent(String argName, String currentValue, String newValue) {
if (!newValue.equals(currentValue)) {
createArg().setValue(argName);
createArg().setValue(newValue);
}
return newValue;
}
}
| false | true | public void execute() throws BuildException {
// find all files that end with '.js'
// if -Dtest=? set, then filter based on it
String filter = ".js";
String test = getProject().getProperty("test");
if (test != null && !test.endsWith(".js")) {
filter = test + ".js";
}
String todir = "";
String basedir = "";
for (BatchTest batchTest : batchTests) {
todir = setArgIfDifferent("-todir", todir, batchTest.todir);
for (FileSet fs : batchTest.fileSets) {
for (String file : fs.getDirectoryScanner(getProject()).getIncludedFiles()) {
basedir = setArgIfDifferent("-basedir", basedir, fs.getDir(getProject()).getAbsolutePath());
if (file.endsWith(filter)) {
createArg().setValue(file);
}
}
}
}
super.execute();
}
| public void execute() throws BuildException {
// if -Dtest=? set, then filter based on it
String test = getProject().getProperty("test");
if (test != null && !test.endsWith(".js")) {
test += ".js";
}
String todir = "";
String basedir = "";
for (BatchTest batchTest : batchTests) {
todir = setArgIfDifferent("-todir", todir, batchTest.todir);
for (FileSet fs : batchTest.fileSets) {
for (String file : fs.getDirectoryScanner(getProject()).getIncludedFiles()) {
basedir = setArgIfDifferent("-basedir", basedir, fs.getDir(getProject()).getAbsolutePath());
if (test == null || file.equals(test)) {
createArg().setValue(file);
}
}
}
}
super.execute();
}
|
diff --git a/fog/src/de/tuilmenau/ics/fog/topology/Node.java b/fog/src/de/tuilmenau/ics/fog/topology/Node.java
index f3a95717..483d2625 100644
--- a/fog/src/de/tuilmenau/ics/fog/topology/Node.java
+++ b/fog/src/de/tuilmenau/ics/fog/topology/Node.java
@@ -1,570 +1,570 @@
/*******************************************************************************
* Forwarding on Gates Simulator/Emulator
* Copyright (C) 2012, Integrated Communication Systems Group, TU Ilmenau.
*
* This program and the accompanying materials are dual-licensed under either
* the terms of the Eclipse Public License v1.0 as published by the Eclipse
* Foundation
*
* or (per the licensee's choosing)
*
* under the terms of the GNU General Public License version 2 as published
* by the Free Software Foundation.
******************************************************************************/
package de.tuilmenau.ics.fog.topology;
import java.io.Serializable;
import java.util.LinkedList;
import java.util.Observable;
import java.util.Random;
import de.tuilmenau.ics.CommonSim.datastream.StreamTime;
import de.tuilmenau.ics.CommonSim.datastream.numeric.IDoubleWriter;
import de.tuilmenau.ics.CommonSim.datastream.numeric.SumNode;
import de.tuilmenau.ics.fog.Config;
import de.tuilmenau.ics.fog.EventHandler;
import de.tuilmenau.ics.fog.IEvent;
import de.tuilmenau.ics.fog.application.Application;
import de.tuilmenau.ics.fog.authentication.IdentityManagement;
import de.tuilmenau.ics.fog.facade.Description;
import de.tuilmenau.ics.fog.facade.Host;
import de.tuilmenau.ics.fog.facade.Identity;
import de.tuilmenau.ics.fog.facade.Name;
import de.tuilmenau.ics.fog.facade.Namespace;
import de.tuilmenau.ics.fog.facade.NetworkException;
import de.tuilmenau.ics.fog.packets.Packet;
import de.tuilmenau.ics.fog.routing.Route;
import de.tuilmenau.ics.fog.routing.RoutingService;
import de.tuilmenau.ics.fog.routing.RoutingServiceMultiplexer;
import de.tuilmenau.ics.fog.topology.ILowerLayerReceive.Status;
import de.tuilmenau.ics.fog.transfer.TransferPlane;
import de.tuilmenau.ics.fog.transfer.TransferPlaneObserver.NamingLevel;
import de.tuilmenau.ics.fog.transfer.forwardingNodes.Multiplexer;
import de.tuilmenau.ics.fog.transfer.manager.Controller;
import de.tuilmenau.ics.fog.transfer.manager.ProcessRegister;
import de.tuilmenau.ics.fog.ui.Logging;
import de.tuilmenau.ics.fog.util.Logger;
import de.tuilmenau.ics.fog.util.SimpleName;
import de.tuilmenau.ics.fog.util.ParameterMap;
/**
* A Node represents a host in a network.
* It provides all functions needed locally on a host, such as a routing
* and authentication service. Furthermore, it can be attached to lower
* layers providing connectivity to other nodes.
*/
public class Node extends Observable implements IElementDecorator
{
public Node(String pName, AutonomousSystem pAS, ParameterMap pParameters)
{
logger = new Logger(pAS.getLogger());
isBroken = false;
isShuttingDown = false;
name = pName;
as = pAS;
controlgate = new Controller(this);
transferPlane = new TransferPlane(getTimeBase(), logger);
// TODO move this stuff to hierarchical plug-in
Random tRandomGenerator = new Random(System.currentTimeMillis());
for(int i = 0; i < mBullyPriority.length; i++)
{
mBullyPriority[i] = tRandomGenerator.nextFloat() * 10;
}
Logging.log(this, "This node has priority " + mBullyPriority[0]);
parameters = pParameters;
// Note: Do not create central FN here, because we do not have
// a routing service available.
multiplexgate = null;
// set capabilities of the node
- String tCap = pParameters.get(Cap, null);
+ String tCap = getParameter().get(Cap, null);
if((tCap == null) || "all".equalsIgnoreCase(tCap)) {
capabilities = Description.createHostExtended();
}
else if("half".equalsIgnoreCase(tCap)) {
capabilities = Description.createHostBasic();
}
else if("none".equalsIgnoreCase(tCap)) {
capabilities = new Description();
}
// TEST:
// routingService = new RoutingServiceMultiplexer();
// ((RoutingServiceMultiplexer)routingService).add(new RoutingService(pRoutingService));
host = new Host(this);
authenticationService = IdentityManagement.getInstance(pAS, host);
ownIdentity = getAuthenticationService().createIdentity(name.toString());
}
/**
* @deprecated Since a node does not need to be named. Just the apps need names. Just for GUI use.
*/
public String getName()
{
return name;
}
public AutonomousSystem getAS()
{
return as;
}
/**
* Registers a routing service entity at a node.
*
* @param pRS Local routing service entity
*/
public void registerRoutingService(RoutingService pRS)
{
if(routingService == null) {
routingService = pRS;
} else {
// check, if already a multiplexer available
if(routingService instanceof RoutingServiceMultiplexer) {
((RoutingServiceMultiplexer) routingService).add(pRS);
} else {
// ... no -> create one and store old and new rs entities in it
RoutingService rs = routingService;
RoutingServiceMultiplexer rsMult = new RoutingServiceMultiplexer();
rsMult.add(rs);
rsMult.add(pRS);
// activate new RS multiplexer as new RS of node
routingService = rsMult;
}
}
// inform transfer service about new routing service
transferPlane.setRoutingService(routingService);
}
/**
* Unregisters a local routing service entity.
*
* @param pRS Routing service entity to unregister
* @returns true==success; false==RS was not registered
*/
public boolean unregisterRoutingService(RoutingService pRS)
{
if(routingService != null) {
// check, if already a multiplexer available
if(routingService instanceof RoutingServiceMultiplexer) {
return ((RoutingServiceMultiplexer) routingService).remove(pRS);
} else {
if(routingService == pRS) {
routingService = null;
return true;
}
}
}
return false;
}
/**
* @return Reference to routing service of node (!= null)
*/
public TransferPlane getTransferPlane()
{
// Debug check: It should not happen, since a node gets at least one
// routing service created by the RoutingServiceFactory.
if(transferPlane == null) throw new RuntimeException("Node " +this +" does not have a routing service.");
return transferPlane;
}
public boolean hasRoutingService()
{
return routingService != null;
}
public RoutingService getRoutingService()
{
return routingService;
}
public Controller getController()
{
return controlgate;
}
/**
* The main FN is just an implementation artifact. From the FoG concept, it is
* not needed. It would be possible to use several FNs within a node (e.g. one connecting
* the interfaces and one connecting the services). But a central one make debugging
* much easier and simplifies the attachment question for elements of the transfer
* service.
*
* @return The main FN of a node, which connects all interfaces and services within a node.
*/
public Multiplexer getCentralFN()
{
if(multiplexgate == null) {
Name nameObj = null;
if(!Config.Routing.REDUCE_NUMBER_FNS) {
nameObj = new SimpleName(NAMESPACE_HOST, name);
}
// Register node in routing services at attaching the first interface.
// It is important, that it is registered before the interface is created.
// TODO name for multiplexer is not really needed => remove it when code finished
multiplexgate = new Multiplexer(this, nameObj, NamingLevel.NAMES, Config.Routing.ENABLE_NODE_RS_HIERARCHY_LEVEL, ownIdentity, null, controlgate);
multiplexgate.open();
}
return multiplexgate;
}
public Identity getIdentity()
{
return ownIdentity;
}
public IdentityManagement getAuthenticationService()
{
return authenticationService;
}
public ProcessRegister getProcessRegister()
{
if(processes == null) {
processes = new ProcessRegister();
}
return processes;
}
/**
* @return Host facade of this node (!= null)
*/
public Host getHost()
{
return host;
}
/**
* @return Configuration of the simulation (!= null)
*/
public Config getConfig()
{
return as.getSimulation().getConfig();
}
/**
* @return Parameter set of the node (!= null)
*/
public ParameterMap getParameter()
{
// debug check:
if(parameters == null) parameters = new ParameterMap(false);
return parameters;
}
/**
* Method for sending test messages in the network.
*
* @deprecated Please use applications establishing connections to other applications.
* @param target Name of the destination node for the test message.
* @param data Data to be send.
* @throws NetworkException On error
*/
public void send(String target, Serializable data) throws NetworkException
{
if(transferPlane == null) {
throw new NetworkException("Node " +this +" does not have a routing service.");
}
Packet newpacket = null;
try {
Route route = transferPlane.getRoute(getCentralFN(), new SimpleName(NAMESPACE_HOST, target), Description.createBE(false), getIdentity());
newpacket = new Packet(route, data);
newpacket.setSourceNode(name);
newpacket.setTargetNode(target);
logger.log(this, "sending packet " +newpacket);
multiplexgate.handlePacket(newpacket, null);
} catch (NetworkException nExc) {
logger.err(this, "No route available", nExc);
}
}
public NetworkInterface attach(ILowerLayer lowerLayer)
{
return controlgate.addLink(lowerLayer);
}
public NetworkInterface detach(ILowerLayer lowerLayer)
{
if(controlgate != null) {
return controlgate.removeLink(lowerLayer);
}
return null;
}
public int getNumberLowerLayers()
{
return controlgate.getNumberLowerLayers();
}
public boolean isGateway()
{
return true;
}
public Status isBroken()
{
if(isBroken) {
if(isErrorTypeVisible) {
return Status.BROKEN;
} else {
return Status.UNKNOWN_ERROR;
}
} else {
return Status.OK;
}
}
public boolean isShuttingDown()
{
return isShuttingDown;
}
public void setBroken(boolean broken, boolean errorTypeVisible)
{
boolean stateChange = isBroken != broken;
isBroken = broken;
getLogger().info(this, "Node is now "+(broken ? "broken" : "working"));
if(isBroken) {
isErrorTypeVisible = errorTypeVisible;
as.getTimeBase().scheduleIn(10.0d, new IEvent() {
@Override
public void fire()
{
if(!repair()) {
as.getTimeBase().scheduleIn(10.0d, this);
}
}
});
}
if(stateChange) notifyObservers(broken);
}
/**
* Tells a node to shutdown all services running on it.
* It will be done in order to shutdown services before
* the node will be removed from simulation.
*/
public void shutdown(boolean waitForExit)
{
isShuttingDown = true;
// do not use list directly, because apps will remove themselves
// from the list, which invalidates iterators
LinkedList<Application> apps = new LinkedList<Application>(host.getApps());
for(Application app : apps) {
app.exit();
if(waitForExit) {
app.waitForExit();
}
}
}
/**
* Informs node that it was deleted from the scenario.
* Resets node and closes everything.
*/
public void deleted()
{
shutdown(true);
if(controlgate != null)
controlgate.closed();
if(multiplexgate != null)
multiplexgate.close();
if((routingService != null) && (routingService instanceof RoutingServiceMultiplexer)) {
((RoutingServiceMultiplexer) routingService).clear();
}
name = null;
routingService = null;
transferPlane = null;
host = null;
authenticationService = null;
ownIdentity = null;
controlgate = null;
multiplexgate = null;
}
private boolean repair()
{
if(isBroken) {
// we are broken, no repair
return false;
} else {
if(controlgate != null) {
controlgate.repair();
}
return true;
}
}
/**
* @return Description of capabilities of this node. This includes the
* types of gates this node is able to create.
*/
public Description getCapabilities()
{
return capabilities;
}
/**
* Sets new capabilities for this node.
* Replaces internal capabilities with the new one.
*/
public void setCapabilities(Description pCapabilities)
{
capabilities = pCapabilities;
controlgate.updateFNsCapabilties(capabilities);
}
/**
* @return Get time base for this node
*/
public EventHandler getTimeBase()
{
return as.getTimeBase();
}
/**
* @return Logger for this node
*/
public Logger getLogger()
{
return logger;
}
/**
* @return Prefix for node statistics
*/
public String getCountNodePrefix()
{
if(countPrefixCache == null) {
countPrefixCache = getClass().getName() +"." +this +".";
}
return countPrefixCache;
}
/**
* Statistic function for counting elements on a node.
*
* @param pPostfix Postfix for statistic
* @param increment Indicates if the counter should be incremented or decremented
*/
public void count(String pPostfix, boolean increment)
{
if(Config.Logging.CREATE_NODE_STATISTIC) {
StreamTime tNow = getTimeBase().nowStream();
String baseName = getCountNodePrefix() +pPostfix;
double incr = 1.0d;
if(!increment) incr = -1.0d;
IDoubleWriter tSum = SumNode.openAsWriter(baseName +".number");
tSum.write(incr, tNow);
if(increment) {
tSum = SumNode.openAsWriter(baseName +".totalSum");
tSum.write(1.0d, tNow);
}
}
}
@Override
public String toString()
{
if(name == null) return null;
else return name.toString();
}
/**
* @deprecated TODO remove it from Node/Host and move it to hierarchical plug-in
*/
public float getBullyPriority(int pLevel)
{
return mBullyPriority[pLevel];
}
public String getDecorationParameter()
{
return (String) mDecorationParameter;
}
public void setDecorationParameter(Object pDecorationParameter)
{
mDecorationParameter = pDecorationParameter;
notifyObservers();
}
@Override
public Object getValue()
{
return mLabel;
}
@Override
public void setValue(Object pLabel)
{
mLabel = pLabel;
}
@Override
public synchronized void notifyObservers(Object pEvent)
{
setChanged();
super.notifyObservers(pEvent);
}
private boolean isBroken;
private boolean isErrorTypeVisible;
private String name;
private AutonomousSystem as;
private Logger logger;
private Controller controlgate;
private Multiplexer multiplexgate;
private TransferPlane transferPlane;
private RoutingService routingService;
private IdentityManagement authenticationService;
private Identity ownIdentity;
private Host host;
private ProcessRegister processes;
private Description capabilities;
private boolean isShuttingDown;
private float [] mBullyPriority = new float[5];
private Object mDecorationParameter=null;
private Object mLabel;
private String countPrefixCache;
public static final Namespace NAMESPACE_HOST = new Namespace("host");
public static final int MAXIMUM_BULLY_PRIORITY = 90;
private ParameterMap parameters;
private final String Cap = "CAPABILITY";
}
| true | true | public Node(String pName, AutonomousSystem pAS, ParameterMap pParameters)
{
logger = new Logger(pAS.getLogger());
isBroken = false;
isShuttingDown = false;
name = pName;
as = pAS;
controlgate = new Controller(this);
transferPlane = new TransferPlane(getTimeBase(), logger);
// TODO move this stuff to hierarchical plug-in
Random tRandomGenerator = new Random(System.currentTimeMillis());
for(int i = 0; i < mBullyPriority.length; i++)
{
mBullyPriority[i] = tRandomGenerator.nextFloat() * 10;
}
Logging.log(this, "This node has priority " + mBullyPriority[0]);
parameters = pParameters;
// Note: Do not create central FN here, because we do not have
// a routing service available.
multiplexgate = null;
// set capabilities of the node
String tCap = pParameters.get(Cap, null);
if((tCap == null) || "all".equalsIgnoreCase(tCap)) {
capabilities = Description.createHostExtended();
}
else if("half".equalsIgnoreCase(tCap)) {
capabilities = Description.createHostBasic();
}
else if("none".equalsIgnoreCase(tCap)) {
capabilities = new Description();
}
// TEST:
// routingService = new RoutingServiceMultiplexer();
// ((RoutingServiceMultiplexer)routingService).add(new RoutingService(pRoutingService));
host = new Host(this);
authenticationService = IdentityManagement.getInstance(pAS, host);
ownIdentity = getAuthenticationService().createIdentity(name.toString());
}
| public Node(String pName, AutonomousSystem pAS, ParameterMap pParameters)
{
logger = new Logger(pAS.getLogger());
isBroken = false;
isShuttingDown = false;
name = pName;
as = pAS;
controlgate = new Controller(this);
transferPlane = new TransferPlane(getTimeBase(), logger);
// TODO move this stuff to hierarchical plug-in
Random tRandomGenerator = new Random(System.currentTimeMillis());
for(int i = 0; i < mBullyPriority.length; i++)
{
mBullyPriority[i] = tRandomGenerator.nextFloat() * 10;
}
Logging.log(this, "This node has priority " + mBullyPriority[0]);
parameters = pParameters;
// Note: Do not create central FN here, because we do not have
// a routing service available.
multiplexgate = null;
// set capabilities of the node
String tCap = getParameter().get(Cap, null);
if((tCap == null) || "all".equalsIgnoreCase(tCap)) {
capabilities = Description.createHostExtended();
}
else if("half".equalsIgnoreCase(tCap)) {
capabilities = Description.createHostBasic();
}
else if("none".equalsIgnoreCase(tCap)) {
capabilities = new Description();
}
// TEST:
// routingService = new RoutingServiceMultiplexer();
// ((RoutingServiceMultiplexer)routingService).add(new RoutingService(pRoutingService));
host = new Host(this);
authenticationService = IdentityManagement.getInstance(pAS, host);
ownIdentity = getAuthenticationService().createIdentity(name.toString());
}
|
diff --git a/src/test/java/com/ontometrics/db/graph/CompositeTest.java b/src/test/java/com/ontometrics/db/graph/CompositeTest.java
index 7ed2cb9..a3e5ee3 100644
--- a/src/test/java/com/ontometrics/db/graph/CompositeTest.java
+++ b/src/test/java/com/ontometrics/db/graph/CompositeTest.java
@@ -1,56 +1,55 @@
package com.ontometrics.db.graph;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TemporaryFolder;
import com.ontometrics.db.graph.model.Employee;
import com.ontometrics.db.graph.model.Manager;
import com.ontometrics.testing.TestGraphDatabase;
/**
* To verify that we can create a class that implements the Composite Design
* Pattern and it will be persisted properly.
*
* @author Rob
*/
public class CompositeTest {
private EntityRepository<Manager> managerRepository = new EntityRepository<Manager>();
@Rule
public TemporaryFolder dbFolder = new TemporaryFolder();
@Rule
public TestGraphDatabase database = new TestGraphDatabase(dbFolder);
private EntityManager entityManager;
@Before
public void setup(){
entityManager = new EntityManager(database.getDatabase());
managerRepository.setEntityManager(entityManager);
}
@Test
public void canPersistComposite(){
Employee joe = new Employee("Joe");
Employee jim = new Employee("Jim");
Employee bob = new Employee("Bob");
Manager pete = new Manager("Pete");
pete.addSubordinate(joe);
pete.addSubordinate(jim);
pete.addSubordinate(bob);
- pete.addSubordinate(pete);
managerRepository.create(pete);
}
}
| true | true | public void canPersistComposite(){
Employee joe = new Employee("Joe");
Employee jim = new Employee("Jim");
Employee bob = new Employee("Bob");
Manager pete = new Manager("Pete");
pete.addSubordinate(joe);
pete.addSubordinate(jim);
pete.addSubordinate(bob);
pete.addSubordinate(pete);
managerRepository.create(pete);
}
| public void canPersistComposite(){
Employee joe = new Employee("Joe");
Employee jim = new Employee("Jim");
Employee bob = new Employee("Bob");
Manager pete = new Manager("Pete");
pete.addSubordinate(joe);
pete.addSubordinate(jim);
pete.addSubordinate(bob);
managerRepository.create(pete);
}
|
diff --git a/common/src/org/riotfamily/common/hibernate/ActiveRecordClassExposer.java b/common/src/org/riotfamily/common/hibernate/ActiveRecordClassExposer.java
index 8b6d33315..363e95557 100644
--- a/common/src/org/riotfamily/common/hibernate/ActiveRecordClassExposer.java
+++ b/common/src/org/riotfamily/common/hibernate/ActiveRecordClassExposer.java
@@ -1,91 +1,91 @@
/* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.riotfamily.common.hibernate;
import java.io.IOException;
import java.util.Collection;
import org.hibernate.EntityMode;
import org.hibernate.SessionFactory;
import org.hibernate.metadata.ClassMetadata;
import org.riotfamily.common.freemarker.ConfigurationPostProcessor;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.util.ClassUtils;
import freemarker.ext.beans.BeansWrapper;
import freemarker.template.Configuration;
import freemarker.template.TemplateException;
import freemarker.template.TemplateHashModel;
import freemarker.template.TemplateModel;
/**
* ConfigurationPostProcessor that adds static models for all ActiveRecord
* classes under their {@link ClassUtils#getShortName(Class) short name}
* as shared variables.
* <p>
* If two ActiveRecord classes with the same name exist in different packages
* only the first one will be exposed and a warning message will be logged.
* In order to access the other (shadowed) class you can use the following
* syntax in your FreeMarker templates:
* <code>statics["com.example.MyActiveRecord"]</code>.
* </p>
*
* @see ActiveRecord
* @see BeansWrapper#getStaticModels()
* @author Felix Gnass [fgnass at neteye dot de]
* @since 8.0
*/
public class ActiveRecordClassExposer implements ConfigurationPostProcessor {
Logger log = LoggerFactory.getLogger(ActiveRecordClassExposer.class);
private SessionFactory sessionFactory;
public ActiveRecordClassExposer(SessionFactory sessionFactory) {
this.sessionFactory = sessionFactory;
}
private BeansWrapper getBeansWrapper(Configuration config) {
if (config.getObjectWrapper() instanceof BeansWrapper) {
return (BeansWrapper) config.getObjectWrapper();
}
return BeansWrapper.getDefaultInstance();
}
@SuppressWarnings("unchecked")
public void postProcessConfiguration(Configuration config)
throws IOException, TemplateException {
TemplateHashModel statics = getBeansWrapper(config).getStaticModels();
Collection<ClassMetadata> allMetadata = sessionFactory.getAllClassMetadata().values();
for (ClassMetadata meta : allMetadata) {
- Class mappedClass = meta.getMappedClass(EntityMode.POJO);
+ Class<?> mappedClass = meta.getMappedClass(EntityMode.POJO);
if (ActiveRecord.class.isAssignableFrom(mappedClass)) {
String key = ClassUtils.getShortName(mappedClass);
if (config.getSharedVariable(key) != null) {
log.warn("Another shared variable with the name '{}'" +
" already exist. Use statics[\"{}\"] in your" +
" FreeMarker templates to access the static" +
" methods of your ActiveRecord class.",
key, mappedClass.getName());
}
else {
TemplateModel tm = statics.get(mappedClass.getName());
config.setSharedVariable(key, tm);
}
}
}
}
}
| true | true | public void postProcessConfiguration(Configuration config)
throws IOException, TemplateException {
TemplateHashModel statics = getBeansWrapper(config).getStaticModels();
Collection<ClassMetadata> allMetadata = sessionFactory.getAllClassMetadata().values();
for (ClassMetadata meta : allMetadata) {
Class mappedClass = meta.getMappedClass(EntityMode.POJO);
if (ActiveRecord.class.isAssignableFrom(mappedClass)) {
String key = ClassUtils.getShortName(mappedClass);
if (config.getSharedVariable(key) != null) {
log.warn("Another shared variable with the name '{}'" +
" already exist. Use statics[\"{}\"] in your" +
" FreeMarker templates to access the static" +
" methods of your ActiveRecord class.",
key, mappedClass.getName());
}
else {
TemplateModel tm = statics.get(mappedClass.getName());
config.setSharedVariable(key, tm);
}
}
}
}
| public void postProcessConfiguration(Configuration config)
throws IOException, TemplateException {
TemplateHashModel statics = getBeansWrapper(config).getStaticModels();
Collection<ClassMetadata> allMetadata = sessionFactory.getAllClassMetadata().values();
for (ClassMetadata meta : allMetadata) {
Class<?> mappedClass = meta.getMappedClass(EntityMode.POJO);
if (ActiveRecord.class.isAssignableFrom(mappedClass)) {
String key = ClassUtils.getShortName(mappedClass);
if (config.getSharedVariable(key) != null) {
log.warn("Another shared variable with the name '{}'" +
" already exist. Use statics[\"{}\"] in your" +
" FreeMarker templates to access the static" +
" methods of your ActiveRecord class.",
key, mappedClass.getName());
}
else {
TemplateModel tm = statics.get(mappedClass.getName());
config.setSharedVariable(key, tm);
}
}
}
}
|
diff --git a/mfp-qa-v2.0/src/test/java/com/myfitnesspal/qa/test/account/PasswordInsensitivityTest.java b/mfp-qa-v2.0/src/test/java/com/myfitnesspal/qa/test/account/PasswordInsensitivityTest.java
index 88cac2b..a4f2074 100644
--- a/mfp-qa-v2.0/src/test/java/com/myfitnesspal/qa/test/account/PasswordInsensitivityTest.java
+++ b/mfp-qa-v2.0/src/test/java/com/myfitnesspal/qa/test/account/PasswordInsensitivityTest.java
@@ -1,32 +1,33 @@
package com.myfitnesspal.qa.test.account;
import org.openqa.selenium.support.PageFactory;
import org.testng.Assert;
import org.testng.annotations.Test;
import com.myfitnesspal.qa.foundation.BasicTestCase;
import com.myfitnesspal.qa.pages.user.LoginPage;
import com.myfitnesspal.qa.utils.rerunner.RetryAnalyzer;
public class PasswordInsensitivityTest extends BasicTestCase
{
private LoginPage loginPage;
@Test(groups = { "ui_regression" } , retryAnalyzer = RetryAnalyzer.class)
public void testPasswordInsensitivity()
{
loginPage = PageFactory.initElements(driver, LoginPage.class);
loginPage.open();
loginPage.login(mfpUser.getLogin(), "TARANTINO");
Assert.assertTrue(loginPage.linkLogout.isDisplayed(), "Logout link doesn't present");
click("linkLogout", loginPage.linkLogout);
+ loginPage.open();
loginPage.login(mfpUser.getLogin(), "tarantino");
Assert.assertTrue(loginPage.linkLogout.isDisplayed(), "Logout link doesn't present");
click("linkLogout", loginPage.linkLogout);
}
}
| true | true | public void testPasswordInsensitivity()
{
loginPage = PageFactory.initElements(driver, LoginPage.class);
loginPage.open();
loginPage.login(mfpUser.getLogin(), "TARANTINO");
Assert.assertTrue(loginPage.linkLogout.isDisplayed(), "Logout link doesn't present");
click("linkLogout", loginPage.linkLogout);
loginPage.login(mfpUser.getLogin(), "tarantino");
Assert.assertTrue(loginPage.linkLogout.isDisplayed(), "Logout link doesn't present");
click("linkLogout", loginPage.linkLogout);
}
| public void testPasswordInsensitivity()
{
loginPage = PageFactory.initElements(driver, LoginPage.class);
loginPage.open();
loginPage.login(mfpUser.getLogin(), "TARANTINO");
Assert.assertTrue(loginPage.linkLogout.isDisplayed(), "Logout link doesn't present");
click("linkLogout", loginPage.linkLogout);
loginPage.open();
loginPage.login(mfpUser.getLogin(), "tarantino");
Assert.assertTrue(loginPage.linkLogout.isDisplayed(), "Logout link doesn't present");
click("linkLogout", loginPage.linkLogout);
}
|
diff --git a/src/me/stutiguias/listeners/McpkPlayerListener.java b/src/me/stutiguias/listeners/McpkPlayerListener.java
index c9485c5..2c7f2de 100644
--- a/src/me/stutiguias/listeners/McpkPlayerListener.java
+++ b/src/me/stutiguias/listeners/McpkPlayerListener.java
@@ -1,132 +1,137 @@
package me.stutiguias.listeners;
import java.sql.Date;
import java.util.Calendar;
import java.util.Map;
import me.stutiguias.mcpk.Mcpk;
import me.stutiguias.mcpk.PK;
import org.bukkit.ChatColor;
import org.bukkit.entity.Entity;
import org.bukkit.entity.Player;
import org.bukkit.entity.Projectile;
import org.bukkit.event.EventHandler;
import org.bukkit.event.EventPriority;
import org.bukkit.event.Listener;
import org.bukkit.event.entity.EntityDamageByEntityEvent;
import org.bukkit.event.entity.EntityDamageEvent;
import org.bukkit.event.entity.EntityDamageEvent.DamageCause;
import org.bukkit.event.entity.PlayerDeathEvent;
import org.bukkit.event.player.PlayerJoinEvent;
/**
*
* @author Stutiguias
*/
public class McpkPlayerListener implements Listener {
private final Mcpk plugin;
public McpkPlayerListener(Mcpk plugin){
this.plugin = plugin;
}
@EventHandler(priority = EventPriority.NORMAL)
public void PlayerDeath(PlayerDeathEvent event) {
if(!(event.getEntity().getKiller() instanceof Player)) return;
String killer = event.getEntity().getKiller().getName();
if(plugin.IsPk.containsKey(killer)){
plugin.IsPk.get(killer).setTime(plugin.getCurrentMilli() + plugin.time);
plugin.IsPk.get(killer).addKills(1);
for(Map.Entry<Integer,String> announcekills : plugin.pkmsg.entrySet())
{
if(plugin.IsPk.get(killer).getKills() == announcekills.getKey()) {
plugin.getServer().broadcastMessage(parseColor(announcekills.getValue().replace("%player%", killer)));
}
}
}else{
PK newpk = new PK();
newpk.setName(killer);
newpk.setTime(plugin.getCurrentMilli() + plugin.time);
plugin.IsPk.put(killer, newpk);
}
}
private String parseColor(String message) {
for (ChatColor color : ChatColor.values()) {
message = message.replaceAll(String.format("&%c", color.getChar()), color.toString());
}
return message;
}
@EventHandler(priority = EventPriority.HIGH, ignoreCancelled = true)
public void onDamage(EntityDamageEvent event){
if(!plugin.usenewbieprotect) return;
if(event instanceof EntityDamageByEntityEvent) {
EntityDamageByEntityEvent EDE = (EntityDamageByEntityEvent)event;
Entity attacker;
if(event.getCause() == DamageCause.PROJECTILE)
{
attacker = ((Projectile) EDE.getDamager()).getShooter();
}else {
attacker = EDE.getDamager();
}
- Entity defender = EDE.getEntity();
+ Entity defender;
+ try {
+ defender = EDE.getEntity();
+ }catch(Exception e) {
+ return;
+ }
if(attacker instanceof Player && defender instanceof Player) {
Player df = (Player)defender;
Player at = (Player)attacker;
PK dfPkPlayer = plugin.DataBase.getPlayer(df.getName());
PK atPkPlayer = plugin.DataBase.getPlayer(at.getName());
Date dt = now();
if(dt.before(dfPkPlayer.getNewBie()) || dt.before(atPkPlayer.getNewBie())) {
event.setCancelled(true);
event.setDamage(0);
}
}
}
}
@EventHandler(priority= EventPriority.NORMAL)
public void onPlayerJoin(PlayerJoinEvent event) {
Player pl = event.getPlayer();
PK pkPlayer = null;
try {
pkPlayer = plugin.DataBase.getPlayer(pl.getName());
}catch(Exception e){
e.printStackTrace();
}
if(pkPlayer == null) {
Date dt = now();
if(!plugin.usenewbieprotect) {
plugin.DataBase.createPlayer(pl.getName(), "0", 0,dt);
Mcpk.log.info("[MCPK] New Player Found " + pl.getName());
}else{
dt = addDays(dt, plugin.newbieprotectdays);
plugin.DataBase.createPlayer(pl.getName(), "0", 0,dt);
pl.sendMessage(plugin.protecmsg.replace("%d%",String.valueOf(plugin.newbieprotectdays)).replace("%date%",dt.toString()));
Mcpk.log.info("[MCPK] New Player Found " + pl.getName() + " is protected until " + dt.toString());
}
}
}
public Date addDays(Date date, int days)
{
Calendar cal = Calendar.getInstance();
cal.setTime(date);
cal.add(Calendar.DATE, days); //minus number would decrement the days
java.sql.Date dataSql = new java.sql.Date(cal.getTime().getTime());
return dataSql;
}
public Date now() {
java.util.Date dataUtil = new java.util.Date();
java.sql.Date dataSql = new java.sql.Date(dataUtil.getTime());
return dataSql;
}
}
| true | true | public void onDamage(EntityDamageEvent event){
if(!plugin.usenewbieprotect) return;
if(event instanceof EntityDamageByEntityEvent) {
EntityDamageByEntityEvent EDE = (EntityDamageByEntityEvent)event;
Entity attacker;
if(event.getCause() == DamageCause.PROJECTILE)
{
attacker = ((Projectile) EDE.getDamager()).getShooter();
}else {
attacker = EDE.getDamager();
}
Entity defender = EDE.getEntity();
if(attacker instanceof Player && defender instanceof Player) {
Player df = (Player)defender;
Player at = (Player)attacker;
PK dfPkPlayer = plugin.DataBase.getPlayer(df.getName());
PK atPkPlayer = plugin.DataBase.getPlayer(at.getName());
Date dt = now();
if(dt.before(dfPkPlayer.getNewBie()) || dt.before(atPkPlayer.getNewBie())) {
event.setCancelled(true);
event.setDamage(0);
}
}
}
}
| public void onDamage(EntityDamageEvent event){
if(!plugin.usenewbieprotect) return;
if(event instanceof EntityDamageByEntityEvent) {
EntityDamageByEntityEvent EDE = (EntityDamageByEntityEvent)event;
Entity attacker;
if(event.getCause() == DamageCause.PROJECTILE)
{
attacker = ((Projectile) EDE.getDamager()).getShooter();
}else {
attacker = EDE.getDamager();
}
Entity defender;
try {
defender = EDE.getEntity();
}catch(Exception e) {
return;
}
if(attacker instanceof Player && defender instanceof Player) {
Player df = (Player)defender;
Player at = (Player)attacker;
PK dfPkPlayer = plugin.DataBase.getPlayer(df.getName());
PK atPkPlayer = plugin.DataBase.getPlayer(at.getName());
Date dt = now();
if(dt.before(dfPkPlayer.getNewBie()) || dt.before(atPkPlayer.getNewBie())) {
event.setCancelled(true);
event.setDamage(0);
}
}
}
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.